diff --git a/internal/testing/backend/certifyVuln_test.go b/internal/testing/backend/certifyVuln_test.go index 124487a884..8196017d19 100644 --- a/internal/testing/backend/certifyVuln_test.go +++ b/internal/testing/backend/certifyVuln_test.go @@ -1632,14 +1632,14 @@ func TestDeleteCertifyVuln(t *testing.T) { t.Fatalf("did not get expected query error, want: %v, got: %v", test.ExpQueryErr, err) } assert.True(t, deleted) - secondgot, err := b.CertifyVulnList(ctx, *test.Query, nil, nil) + secondGot, err := b.CertifyVulnList(ctx, *test.Query, nil, nil) if (err != nil) != test.ExpQueryErr { t.Fatalf("did not get expected query error, want: %v, got: %v", test.ExpQueryErr, err) } if err != nil { return } - assert.True(t, len(secondgot.Edges) == 1) + assert.True(t, len(secondGot.Edges) == 1) }) } } diff --git a/internal/testing/backend/hasSBOM_test.go b/internal/testing/backend/hasSBOM_test.go index ea71b2f9d3..2aba519637 100644 --- a/internal/testing/backend/hasSBOM_test.go +++ b/internal/testing/backend/hasSBOM_test.go @@ -25,6 +25,7 @@ import ( "github.com/guacsec/guac/internal/testing/ptrfrom" "github.com/guacsec/guac/internal/testing/testdata" "github.com/guacsec/guac/pkg/assembler/graphql/model" + "github.com/stretchr/testify/assert" ) type testDependency struct { @@ -3169,3 +3170,243 @@ func TestIngestHasSBOMs(t *testing.T) { }) } } + +func TestDeleteHasSBOM(t *testing.T) { + ctx := context.Background() + b := setupTest(t) + type call struct { + Sub model.PackageOrArtifactInputs + HS []*model.HasSBOMInputSpec + Inc []*model.HasSBOMIncludesInputSpec + } + tests := []struct { + Name string + InPkg []*model.PkgInputSpec + InArt []*model.ArtifactInputSpec + PkgArt *model.PackageOrArtifactInputs + IsDeps []testDependency + IsOccs []testOccurrence + Calls []call + Query *model.HasSBOMSpec + ExpHS []*model.HasSbom + ExpIngestErr bool + ExpQueryErr bool + }{ + { + Name: "Query on Package", + InPkg: []*model.PkgInputSpec{testdata.P2, testdata.P4}, + InArt: 
[]*model.ArtifactInputSpec{testdata.A1}, + PkgArt: &model.PackageOrArtifactInputs{ + Packages: []*model.IDorPkgInput{&model.IDorPkgInput{PackageInput: testdata.P2}, &model.IDorPkgInput{PackageInput: testdata.P4}}, + Artifacts: []*model.IDorArtifactInput{{ArtifactInput: testdata.A1}}, + }, + IsDeps: []testDependency{{ + pkg: testdata.P2, + depPkg: testdata.P4, + matchType: mSpecific, + isDep: &model.IsDependencyInputSpec{ + Justification: "test justification", + }, + }}, + IsOccs: []testOccurrence{{ + Subj: &model.PackageOrSourceInput{Package: &model.IDorPkgInput{PackageInput: testdata.P4}}, + Art: testdata.A1, + isOcc: &model.IsOccurrenceInputSpec{Justification: "test justification"}, + }}, + Calls: []call{ + { + Sub: model.PackageOrArtifactInputs{ + Packages: []*model.IDorPkgInput{&model.IDorPkgInput{PackageInput: testdata.P2}, &model.IDorPkgInput{PackageInput: testdata.P4}}, + }, + HS: []*model.HasSBOMInputSpec{ + { + URI: "test uri", + }, + { + URI: "test uri", + }, + }, + }, + { + Sub: model.PackageOrArtifactInputs{ + Artifacts: []*model.IDorArtifactInput{{ArtifactInput: testdata.A1}}, + }, + HS: []*model.HasSBOMInputSpec{ + { + URI: "test uri", + }, + }, + }, + }, + Query: &model.HasSBOMSpec{ + Subject: &model.PackageOrArtifactSpec{ + Package: &model.PkgSpec{ + Version: ptrfrom.String("2.11.1"), + }, + }, + }, + ExpHS: []*model.HasSbom{ + { + Subject: testdata.P2out, + URI: "test uri", + IncludedSoftware: []model.PackageOrArtifact{testdata.P2out, testdata.P4out, testdata.A1out}, + IncludedDependencies: []*model.IsDependency{{ + Package: testdata.P2out, + DependencyPackage: testdata.P4out, + Justification: "test justification", + }}, + IncludedOccurrences: []*model.IsOccurrence{{ + Subject: testdata.P4out, + Artifact: testdata.A1out, + Justification: "test justification", + }}, + }, + }, + }, + { + Name: "Query on Artifact", + InPkg: []*model.PkgInputSpec{testdata.P1}, + InArt: []*model.ArtifactInputSpec{testdata.A1, testdata.A2}, + PkgArt: 
&model.PackageOrArtifactInputs{ + Packages: []*model.IDorPkgInput{&model.IDorPkgInput{PackageInput: testdata.P1}}, + Artifacts: []*model.IDorArtifactInput{{ArtifactInput: testdata.A1}, {ArtifactInput: testdata.A2}}, + }, + IsOccs: []testOccurrence{{ + Subj: &model.PackageOrSourceInput{Package: &model.IDorPkgInput{PackageInput: testdata.P1}}, + Art: testdata.A2, + isOcc: &model.IsOccurrenceInputSpec{Justification: "test justification"}, + }}, + Calls: []call{ + { + Sub: model.PackageOrArtifactInputs{ + Packages: []*model.IDorPkgInput{&model.IDorPkgInput{PackageInput: testdata.P1}}, + }, + HS: []*model.HasSBOMInputSpec{ + { + URI: "test uri", + }, + }, + }, + { + Sub: model.PackageOrArtifactInputs{ + Artifacts: []*model.IDorArtifactInput{{ArtifactInput: testdata.A1}, {ArtifactInput: testdata.A2}}, + }, + HS: []*model.HasSBOMInputSpec{ + { + URI: "test uri", + }, + { + URI: "test uri", + }, + }, + }, + }, + Query: &model.HasSBOMSpec{ + Subject: &model.PackageOrArtifactSpec{ + Artifact: &model.ArtifactSpec{ + Algorithm: ptrfrom.String("sha1"), + }, + }, + }, + ExpHS: []*model.HasSbom{ + { + Subject: testdata.A2out, + URI: "test uri", + IncludedSoftware: []model.PackageOrArtifact{testdata.P1out, testdata.A1out, testdata.A2out}, + IncludedOccurrences: []*model.IsOccurrence{{ + Subject: testdata.P1out, + Artifact: testdata.A2out, + Justification: "test justification", + }}, + }, + }, + }, + } + for _, test := range tests { + t.Run(test.Name, func(t *testing.T) { + for _, p := range test.InPkg { + if _, err := b.IngestPackage(ctx, model.IDorPkgInput{PackageInput: p}); err != nil { + t.Fatalf("Could not ingest package: %v", err) + } + } + for _, a := range test.InArt { + if _, err := b.IngestArtifact(ctx, &model.IDorArtifactInput{ArtifactInput: a}); err != nil { + t.Fatalf("Could not ingest artifact: %v", err) + } + } + includes := model.HasSBOMIncludesInputSpec{} + if test.PkgArt != nil { + if pkgs, err := b.IngestPackages(ctx, test.PkgArt.Packages); err != nil { + 
t.Fatalf("Could not ingest package: %v", err) + } else { + for _, pkg := range pkgs { + includes.Packages = append(includes.Packages, pkg.PackageVersionID) + } + } + if arts, err := b.IngestArtifacts(ctx, test.PkgArt.Artifacts); err != nil { + t.Fatalf("Could not ingest artifact: %v", err) + } else { + includes.Artifacts = append(includes.Artifacts, arts...) + } + } + + for _, dep := range test.IsDeps { + if isDep, err := b.IngestDependency(ctx, model.IDorPkgInput{PackageInput: dep.pkg}, model.IDorPkgInput{PackageInput: dep.depPkg}, dep.matchType, *dep.isDep); err != nil { + t.Fatalf("Could not ingest dependency: %v", err) + } else { + includes.Dependencies = append(includes.Dependencies, isDep) + } + } + + for _, occ := range test.IsOccs { + if isOcc, err := b.IngestOccurrence(ctx, *occ.Subj, model.IDorArtifactInput{ArtifactInput: occ.Art}, *occ.isOcc); err != nil { + t.Fatalf("Could not ingest occurrence: %v", err) + } else { + includes.Occurrences = append(includes.Occurrences, isOcc) + } + } + for _, o := range test.Calls { + var sbomIncludes []*model.HasSBOMIncludesInputSpec + for count := 0; count < len(o.HS); count++ { + sbomIncludes = append(sbomIncludes, &includes) + } + _, err := b.IngestHasSBOMs(ctx, o.Sub, o.HS, sbomIncludes) + if (err != nil) != test.ExpIngestErr { + t.Fatalf("did not get expected ingest error, want: %v, got: %v", test.ExpIngestErr, err) + } + if err != nil { + return + } + } + got, err := b.HasSBOMList(ctx, *test.Query, nil, nil) + if (err != nil) != test.ExpQueryErr { + t.Fatalf("did not get expected query error, want: %v, got: %v", test.ExpQueryErr, err) + } + if err != nil { + return + } + var returnedObjects []*model.HasSbom + if got != nil { + for _, obj := range got.Edges { + returnedObjects = append(returnedObjects, obj.Node) + } + } + if diff := cmp.Diff(test.ExpHS, returnedObjects, commonOpts); diff != "" { + t.Errorf("Unexpected results. 
(-want +got):\n%s", diff) + } + deleted, err := b.Delete(ctx, returnedObjects[0].ID) + if err != nil { + t.Fatalf("did not get expected query error, want: %v, got: %v", test.ExpQueryErr, err) + } + assert.True(t, deleted) + secondGot, err := b.HasSBOMList(ctx, *test.Query, nil, nil) + if (err != nil) != test.ExpQueryErr { + t.Fatalf("did not get expected query error, want: %v, got: %v", test.ExpQueryErr, err) + } + if err != nil { + return + } + assert.Nil(t, secondGot) + }) + } +} diff --git a/internal/testing/backend/hasSLSA_test.go b/internal/testing/backend/hasSLSA_test.go index c6108b02c9..bbc94069f1 100644 --- a/internal/testing/backend/hasSLSA_test.go +++ b/internal/testing/backend/hasSLSA_test.go @@ -25,6 +25,7 @@ import ( "github.com/guacsec/guac/internal/testing/ptrfrom" "github.com/guacsec/guac/internal/testing/testdata" "github.com/guacsec/guac/pkg/assembler/graphql/model" + "github.com/stretchr/testify/assert" ) func TestHasSLSA(t *testing.T) { @@ -925,3 +926,268 @@ func TestIngestHasSLSAs(t *testing.T) { }) } } + +func TestDeleteHasSLSAs(t *testing.T) { + ctx := context.Background() + b := setupTest(t) + type call struct { + Sub []*model.IDorArtifactInput + BF [][]*model.IDorArtifactInput + BB []*model.IDorBuilderInput + SLSA []*model.SLSAInputSpec + } + tests := []struct { + Name string + InArt []*model.ArtifactInputSpec + InBld []*model.BuilderInputSpec + Calls []call + Query *model.HasSLSASpec + ExpHS []*model.HasSlsa + ExpIngestErr bool + ExpQueryErr bool + }{ + { + Name: "HappyPath", + InArt: []*model.ArtifactInputSpec{testdata.A1, testdata.A2}, + InBld: []*model.BuilderInputSpec{testdata.B1}, + Calls: []call{ + { + Sub: []*model.IDorArtifactInput{{ArtifactInput: testdata.A1}}, + BF: [][]*model.IDorArtifactInput{{{ArtifactInput: testdata.A2}}}, + BB: []*model.IDorBuilderInput{{BuilderInput: testdata.B1}}, + SLSA: []*model.SLSAInputSpec{ + { + BuildType: "test type", + }, + }, + }, + }, + Query: &model.HasSLSASpec{ + BuildType: 
ptrfrom.String("test type"), + }, + ExpHS: []*model.HasSlsa{ + { + Subject: testdata.A1out, + Slsa: &model.Slsa{ + BuiltBy: testdata.B1out, + BuiltFrom: []*model.Artifact{testdata.A2out}, + BuildType: "test type", + }, + }, + }, + }, + { + Name: "Ingest twice", + InArt: []*model.ArtifactInputSpec{testdata.A1, testdata.A2}, + InBld: []*model.BuilderInputSpec{testdata.B1}, + Calls: []call{ + { + Sub: []*model.IDorArtifactInput{{ArtifactInput: testdata.A1}, {ArtifactInput: testdata.A1}}, + BF: [][]*model.IDorArtifactInput{{{ArtifactInput: testdata.A2}}, {{ArtifactInput: testdata.A2}}}, + BB: []*model.IDorBuilderInput{{BuilderInput: testdata.B1}, {BuilderInput: testdata.B1}}, + SLSA: []*model.SLSAInputSpec{ + { + BuildType: "test type", + }, + { + BuildType: "test type", + }, + }, + }, + }, + Query: &model.HasSLSASpec{ + BuildType: ptrfrom.String("test type"), + }, + ExpHS: []*model.HasSlsa{ + { + Subject: testdata.A1out, + Slsa: &model.Slsa{ + BuiltBy: testdata.B1out, + BuiltFrom: []*model.Artifact{testdata.A2out}, + BuildType: "test type", + }, + }, + }, + }, + { + Name: "Query on Build Type", + InArt: []*model.ArtifactInputSpec{testdata.A1, testdata.A2}, + InBld: []*model.BuilderInputSpec{testdata.B1}, + Calls: []call{ + { + Sub: []*model.IDorArtifactInput{{ArtifactInput: testdata.A1}, {ArtifactInput: testdata.A1}}, + BF: [][]*model.IDorArtifactInput{{{ArtifactInput: testdata.A2}}, {{ArtifactInput: testdata.A2}}}, + BB: []*model.IDorBuilderInput{{BuilderInput: testdata.B1}, {BuilderInput: testdata.B1}}, + SLSA: []*model.SLSAInputSpec{ + { + BuildType: "test type one", + }, + { + BuildType: "test type two", + }, + }, + }, + }, + Query: &model.HasSLSASpec{ + BuildType: ptrfrom.String("test type one"), + }, + ExpHS: []*model.HasSlsa{ + { + Subject: testdata.A1out, + Slsa: &model.Slsa{ + BuiltBy: testdata.B1out, + BuiltFrom: []*model.Artifact{testdata.A2out}, + BuildType: "test type one", + }, + }, + }, + }, + { + Name: "Query on Subject", + InArt: 
[]*model.ArtifactInputSpec{testdata.A1, testdata.A2, testdata.A3}, + InBld: []*model.BuilderInputSpec{testdata.B1}, + Calls: []call{ + { + Sub: []*model.IDorArtifactInput{{ArtifactInput: testdata.A1}, {ArtifactInput: testdata.A3}}, + BF: [][]*model.IDorArtifactInput{{{ArtifactInput: testdata.A2}}, {{ArtifactInput: testdata.A2}}}, + BB: []*model.IDorBuilderInput{{BuilderInput: testdata.B1}, {BuilderInput: testdata.B1}}, + SLSA: []*model.SLSAInputSpec{ + {SlsaVersion: "test type one"}, + {}, + }, + }, + }, + Query: &model.HasSLSASpec{ + Subject: &model.ArtifactSpec{ + Algorithm: ptrfrom.String("sha256"), + Digest: ptrfrom.String("6bbb0da1891646e58eb3e6a63af3a6fc3c8eb5a0d44824cba581d2e14a0450cf"), + }, + SlsaVersion: ptrfrom.String("test type one"), + }, + ExpHS: []*model.HasSlsa{ + { + Subject: testdata.A1out, + Slsa: &model.Slsa{ + BuiltBy: testdata.B1out, + BuiltFrom: []*model.Artifact{testdata.A2out}, + SlsaVersion: "test type one", + }, + }, + }, + }, + { + Name: "Query on Materials", + InArt: []*model.ArtifactInputSpec{testdata.A1, testdata.A2, testdata.A3, testdata.A4}, + InBld: []*model.BuilderInputSpec{testdata.B1}, + Calls: []call{ + { + Sub: []*model.IDorArtifactInput{{ArtifactInput: testdata.A1}, {ArtifactInput: testdata.A1}, {ArtifactInput: testdata.A1}}, + BF: [][]*model.IDorArtifactInput{{{ArtifactInput: testdata.A2}}, {{ArtifactInput: testdata.A2}, {ArtifactInput: testdata.A3}}, {{ArtifactInput: testdata.A4}}}, + BB: []*model.IDorBuilderInput{{BuilderInput: testdata.B1}, {BuilderInput: testdata.B1}, {BuilderInput: testdata.B1}}, + SLSA: []*model.SLSAInputSpec{ + {}, + {}, + {}, + }, + }, + }, + Query: &model.HasSLSASpec{ + BuiltFrom: []*model.ArtifactSpec{{ + Digest: ptrfrom.String("5a787865sd676dacb0142afa0b83029cd7befd9"), + }}, + }, + ExpHS: []*model.HasSlsa{ + { + Subject: testdata.A1out, + Slsa: &model.Slsa{ + BuiltBy: testdata.B1out, + BuiltFrom: []*model.Artifact{testdata.A4out}, + }, + }, + }, + }, { + Name: "docref", + InArt: 
[]*model.ArtifactInputSpec{testdata.A1, testdata.A2}, + InBld: []*model.BuilderInputSpec{testdata.B1}, + Calls: []call{ + { + Sub: []*model.IDorArtifactInput{{ArtifactInput: testdata.A1}}, + BF: [][]*model.IDorArtifactInput{{{ArtifactInput: testdata.A2}}}, + BB: []*model.IDorBuilderInput{{BuilderInput: testdata.B1}}, + SLSA: []*model.SLSAInputSpec{ + { + BuildType: "test type", + DocumentRef: "test", + }, + }, + }, + }, + Query: &model.HasSLSASpec{ + DocumentRef: ptrfrom.String("test"), + }, + ExpHS: []*model.HasSlsa{ + { + Subject: testdata.A1out, + Slsa: &model.Slsa{ + BuiltBy: testdata.B1out, + BuiltFrom: []*model.Artifact{testdata.A2out}, + BuildType: "test type", + DocumentRef: "test", + }, + }, + }, + }, + } + for _, test := range tests { + t.Run(test.Name, func(t *testing.T) { + for _, a := range test.InArt { + if _, err := b.IngestArtifact(ctx, &model.IDorArtifactInput{ArtifactInput: a}); err != nil { + t.Fatalf("Could not ingest artifact: %v", err) + } + } + for _, bld := range test.InBld { + if _, err := b.IngestBuilder(ctx, &model.IDorBuilderInput{BuilderInput: bld}); err != nil { + t.Fatalf("Could not ingest builder: %v", err) + } + } + for _, o := range test.Calls { + _, err := b.IngestSLSAs(ctx, o.Sub, o.BF, o.BB, o.SLSA) + if (err != nil) != test.ExpIngestErr { + t.Fatalf("did not get expected ingest error, want: %v, got: %v", test.ExpIngestErr, err) + } + if err != nil { + return + } + } + got, err := b.HasSLSAList(ctx, *test.Query, nil, nil) + if (err != nil) != test.ExpQueryErr { + t.Fatalf("did not get expected query error, want: %v, got: %v", test.ExpQueryErr, err) + } + if err != nil { + return + } + var returnedObjects []*model.HasSlsa + if got != nil { + for _, obj := range got.Edges { + returnedObjects = append(returnedObjects, obj.Node) + } + } + if diff := cmp.Diff(test.ExpHS, returnedObjects, commonOpts); diff != "" { + t.Errorf("Unexpected results. 
(-want +got):\n%s", diff) + } + deleted, err := b.Delete(ctx, returnedObjects[0].ID) + if err != nil { + t.Fatalf("did not get expected query error, want: %v, got: %v", test.ExpQueryErr, err) + } + assert.True(t, deleted) + secondGot, err := b.HasSLSAList(ctx, *test.Query, nil, nil) + if (err != nil) != test.ExpQueryErr { + t.Fatalf("did not get expected query error, want: %v, got: %v", test.ExpQueryErr, err) + } + if err != nil { + return + } + assert.Nil(t, secondGot) + }) + } +} diff --git a/internal/testing/backend/main_test.go b/internal/testing/backend/main_test.go index 95078ffbef..c2670302f1 100644 --- a/internal/testing/backend/main_test.go +++ b/internal/testing/backend/main_test.go @@ -93,9 +93,12 @@ var skipMatrix = map[string]map[string]bool{ // redis order issues "TestVEX": {arango: true, redis: true, tikv: true}, // redis order issues - "TestVEXBulkIngest": {arango: true, redis: true}, - "TestFindSoftware": {redis: true, arango: true}, + "TestVEXBulkIngest": {arango: true, redis: true}, + "TestFindSoftware": {redis: true, arango: true}, + // remove these once its implemented for the other backends "TestDeleteCertifyVuln": {arango: true, memmap: true, redis: true, tikv: true}, + "TestDeleteHasSBOM": {arango: true, memmap: true, redis: true, tikv: true}, + "TestDeleteHasSLSAs": {arango: true, memmap: true, redis: true, tikv: true}, } type backend interface { diff --git a/pkg/assembler/backends/arangodb/path.go b/pkg/assembler/backends/arangodb/path.go index a230972336..f3bfbaa65d 100644 --- a/pkg/assembler/backends/arangodb/path.go +++ b/pkg/assembler/backends/arangodb/path.go @@ -358,6 +358,8 @@ func (c *arangoClient) Nodes(ctx context.Context, nodeIDs []string) ([]model.Nod return rv, nil } +// Delete node and all associated relationships. This functionality is only implemented for +// certifyVuln, HasSBOM and HasSLSA. 
func (c *arangoClient) Delete(ctx context.Context, node string) (bool, error) { panic(fmt.Errorf("not implemented: Delete")) } diff --git a/pkg/assembler/backends/ent/backend/certifyVuln.go b/pkg/assembler/backends/ent/backend/certifyVuln.go index 4b86ad7ac4..8c408cdb54 100644 --- a/pkg/assembler/backends/ent/backend/certifyVuln.go +++ b/pkg/assembler/backends/ent/backend/certifyVuln.go @@ -40,7 +40,7 @@ func bulkCertifyVulnGlobalID(ids []string) []string { return toGlobalIDs(certifyvuln.Table, ids) } -func (b *EntBackend) DeleteCertifyVuln(ctx context.Context, certifyVulnID uuid.UUID) (bool, error) { +func (b *EntBackend) deleteCertifyVuln(ctx context.Context, certifyVulnID uuid.UUID) (bool, error) { _, txErr := WithinTX(ctx, b.client, func(ctx context.Context) (*string, error) { tx := ent.TxFromContext(ctx) diff --git a/pkg/assembler/backends/ent/backend/dependency.go b/pkg/assembler/backends/ent/backend/dependency.go index 7511e0b4f7..8191950e27 100644 --- a/pkg/assembler/backends/ent/backend/dependency.go +++ b/pkg/assembler/backends/ent/backend/dependency.go @@ -116,6 +116,28 @@ func getIsDepObject(q *ent.DependencyQuery) *ent.DependencyQuery { Order(ent.Asc(dependency.FieldID)) } +// getIsDepObjectWithoutEdges is used to recreate the isDependency object without eager loading the edges +func getIsDepObjectWithoutEdges(q *ent.DependencyQuery) *ent.DependencyQuery { + return q. 
+ Order(ent.Asc(dependency.FieldID)) +} + +// deleteIsDependency is called by hasSBOM to delete the isDependency nodes that are part of the hasSBOM +func (b *EntBackend) deleteIsDependency(ctx context.Context, isDependencyID uuid.UUID) error { + _, txErr := WithinTX(ctx, b.client, func(ctx context.Context) (*string, error) { + tx := ent.TxFromContext(ctx) + + if err := tx.Dependency.DeleteOneID(isDependencyID).Exec(ctx); err != nil { + return nil, errors.Wrap(err, "failed to delete isDependency with error") + } + return nil, nil + }) + if txErr != nil { + return txErr + } + return nil +} + func (b *EntBackend) IngestDependencies(ctx context.Context, pkgs []*model.IDorPkgInput, depPkgs []*model.IDorPkgInput, depPkgMatchType model.MatchFlags, dependencies []*model.IsDependencyInputSpec) ([]string, error) { funcName := "IngestDependencies" ids, txErr := WithinTX(ctx, b.client, func(ctx context.Context) (*[]string, error) { diff --git a/pkg/assembler/backends/ent/backend/neighbors.go b/pkg/assembler/backends/ent/backend/neighbors.go index 9ad2207b26..2f6c6d53d7 100644 --- a/pkg/assembler/backends/ent/backend/neighbors.go +++ b/pkg/assembler/backends/ent/backend/neighbors.go @@ -49,6 +49,8 @@ import ( "github.com/guacsec/guac/pkg/assembler/graphql/model" ) +// Delete node and all associated relationships. This functionality is only implemented for +// certifyVuln, HasSBOM and HasSLSA. 
func (b *EntBackend) Delete(ctx context.Context, node string) (bool, error) { foundGlobalID := fromGlobalID(node) if foundGlobalID.nodeType == "" { @@ -61,219 +63,24 @@ func (b *EntBackend) Delete(ctx context.Context, node string) (bool, error) { } switch foundGlobalID.nodeType { - // case artifact.Table: - // artifacts, err := b.Artifacts(ctx, &model.ArtifactSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for Artifacts via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(artifacts) != 1 { - // return false, fmt.Errorf("ID returned multiple Artifacts nodes %s", nodeID.String()) - // } - // return artifacts[0], nil - // case packageversion.Table: - // pv, err := b.client.PackageVersion.Query(). - // Where(packageversion.ID(nodeID)). - // WithName(func(q *ent.PackageNameQuery) {}). - // Only(ctx) - // if err != nil { - // return false, err - // } - // return toModelPackage(backReferencePackageVersion(pv)), nil - // case packagename.Table: - // pn, err := b.client.PackageName.Query(). - // Where(packagename.ID(nodeID)). - // WithVersions(). 
- // Only(ctx) - // if err != nil { - // return false, err - // } - // return toModelPackage(backReferencePackageName(pn)), nil - // case sourcename.Table: - // sources, err := b.Sources(ctx, &model.SourceSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for Sources via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(sources) != 1 { - // return false, fmt.Errorf("ID returned multiple Sources nodes %s", nodeID.String()) - // } - // return sources[0], nil - // case builder.Table: - // builders, err := b.Builders(ctx, &model.BuilderSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for Builders via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(builders) != 1 { - // return false, fmt.Errorf("ID returned multiple Builders nodes %s", nodeID.String()) - // } - // return builders[0], nil - // case license.Table: - // licenses, err := b.Licenses(ctx, &model.LicenseSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for Licenses via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(licenses) != 1 { - // return false, fmt.Errorf("ID returned multiple Licenses nodes %s", nodeID.String()) - // } - // return licenses[0], nil - // case vulnerabilityid.Table: - // vulnerabilities, err := b.Vulnerabilities(ctx, &model.VulnerabilitySpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for Vulnerabilities via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(vulnerabilities) != 1 { - // return false, fmt.Errorf("ID returned multiple Vulnerabilities nodes %s", nodeID.String()) - // } - // return vulnerabilities[0], nil - // case certifyBadString: - // certs, err := b.CertifyBad(ctx, &model.CertifyBadSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, 
fmt.Errorf("failed to query for CertifyBad via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(certs) != 1 { - // return false, fmt.Errorf("ID returned multiple CertifyBad nodes %s", nodeID.String()) - // } - // return certs[0], nil - // case certifyGoodString: - // certs, err := b.CertifyGood(ctx, &model.CertifyGoodSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for CertifyGood via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(certs) != 1 { - // return false, fmt.Errorf("ID returned multiple CertifyGood nodes %s", nodeID.String()) - // } - // return certs[0], nil - // case certifylegal.Table: - // legals, err := b.CertifyLegal(ctx, &model.CertifyLegalSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for CertifyLegal via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(legals) != 1 { - // return false, fmt.Errorf("ID returned multiple CertifyLegal nodes %s", nodeID.String()) - // } - // return legals[0], nil - // case certifyscorecard.Table: - // scores, err := b.Scorecards(ctx, &model.CertifyScorecardSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for scorecard via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(scores) != 1 { - // return false, fmt.Errorf("ID returned multiple scorecard nodes %s", nodeID.String()) - // } - // return scores[0], nil - // case certifyvex.Table: - // vexs, err := b.CertifyVEXStatement(ctx, &model.CertifyVEXStatementSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for CertifyVEXStatement via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(vexs) != 1 { - // return false, fmt.Errorf("ID returned multiple CertifyVEXStatement nodes %s", nodeID.String()) - // } - // return vexs[0], nil case 
certifyvuln.Table: - deleted, err := b.DeleteCertifyVuln(ctx, nodeID) + deleted, err := b.deleteCertifyVuln(ctx, nodeID) if err != nil { return false, fmt.Errorf("failed to delete CertifyVuln via ID: %s, with error: %w", nodeID.String(), err) } return deleted, nil - // case hashequal.Table: - // hes, err := b.HashEqual(ctx, &model.HashEqualSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for HashEqual via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(hes) != 1 { - // return false, fmt.Errorf("ID returned multiple HashEqual nodes %s", nodeID.String()) - // } - // return hes[0], nil - // case hasmetadata.Table: - // hms, err := b.HasMetadata(ctx, &model.HasMetadataSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for HasMetadata via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(hms) != 1 { - // return false, fmt.Errorf("ID returned multiple HasMetadata nodes %s", nodeID.String()) - // } - // return hms[0], nil - // case billofmaterials.Table: - // hbs, err := b.HasSBOM(ctx, &model.HasSBOMSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for HasSBOM via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(hbs) != 1 { - // return false, fmt.Errorf("ID returned multiple HasSBOM nodes %s", nodeID.String()) - // } - // return hbs[0], nil - // case slsaattestation.Table: - // slsas, err := b.HasSlsa(ctx, &model.HasSLSASpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for HasSlsa via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(slsas) != 1 { - // return false, fmt.Errorf("ID returned multiple HasSlsa nodes %s", nodeID.String()) - // } - // return slsas[0], nil - // case hassourceat.Table: - // hsas, err := b.HasSourceAt(ctx, &model.HasSourceAtSpec{ID: 
ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for HasSourceAt via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(hsas) != 1 { - // return false, fmt.Errorf("ID returned multiple HasSourceAt nodes %s", nodeID.String()) - // } - // return hsas[0], nil - // case dependency.Table: - // deps, err := b.IsDependency(ctx, &model.IsDependencySpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for IsDependency via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(deps) != 1 { - // return false, fmt.Errorf("ID returned multiple IsDependency nodes %s", nodeID.String()) - // } - // return deps[0], nil - // case occurrence.Table: - // occurs, err := b.IsOccurrence(ctx, &model.IsOccurrenceSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for IsOccurrence via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(occurs) != 1 { - // return false, fmt.Errorf("ID returned multiple IsOccurrence nodes %s", nodeID.String()) - // } - // return occurs[0], nil - // case pkgequal.Table: - // pes, err := b.PkgEqual(ctx, &model.PkgEqualSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for PkgEqual via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(pes) != 1 { - // return false, fmt.Errorf("ID returned multiple PkgEqual nodes %s", nodeID.String()) - // } - // return pes[0], nil - // case pointofcontact.Table: - // pocs, err := b.PointOfContact(ctx, &model.PointOfContactSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for PointOfContact via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(pocs) != 1 { - // return false, fmt.Errorf("ID returned multiple PointOfContact nodes %s", nodeID.String()) - // } - // return 
pocs[0], nil - // case vulnequal.Table: - // ves, err := b.VulnEqual(ctx, &model.VulnEqualSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for VulnEqual via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(ves) != 1 { - // return false, fmt.Errorf("ID returned multiple VulnEqual nodes %s", nodeID.String()) - // } - // return ves[0], nil - // case vulnerabilitymetadata.Table: - // vms, err := b.VulnerabilityMetadata(ctx, &model.VulnerabilityMetadataSpec{ID: ptrfrom.String(nodeID.String())}) - // if err != nil { - // return false, fmt.Errorf("failed to query for VulnerabilityMetadata via ID: %s, with error: %w", nodeID.String(), err) - // } - // if len(vms) != 1 { - // return false, fmt.Errorf("ID returned multiple VulnerabilityMetadata nodes %s", nodeID.String()) - // } - // return vms[0], nil + case billofmaterials.Table: + deleted, err := b.deleteHasSbom(ctx, nodeID) + if err != nil { + return false, fmt.Errorf("failed to delete hasSBOM via ID: %s, with error: %w", nodeID.String(), err) + } + return deleted, nil + case slsaattestation.Table: + deleted, err := b.deleteSLSA(ctx, nodeID) + if err != nil { + return false, fmt.Errorf("failed to delete hasSLSA via ID: %s, with error: %w", nodeID.String(), err) + } + return deleted, nil default: log.Printf("Unknown node type: %s", foundGlobalID.nodeType) } diff --git a/pkg/assembler/backends/ent/backend/occurrence.go b/pkg/assembler/backends/ent/backend/occurrence.go index 31944d8724..a89edcfdc5 100644 --- a/pkg/assembler/backends/ent/backend/occurrence.go +++ b/pkg/assembler/backends/ent/backend/occurrence.go @@ -115,6 +115,27 @@ func getOccurrenceObject(q *ent.OccurrenceQuery) *ent.OccurrenceQuery { WithSource(func(q *ent.SourceNameQuery) {}) } +// getOccurrenceObjectWithoutEdges is used to recreate the occurrence object without eager loading the edges +func getOccurrenceObjectWithoutEdges(q *ent.OccurrenceQuery) *ent.OccurrenceQuery { + return 
q +} + +// deleteIsOccurrences is called by hasSBOM to delete the isOccurrence nodes that are part of the hasSBOM +func (b *EntBackend) deleteIsOccurrences(ctx context.Context, isOccurrenceID uuid.UUID) error { + _, txErr := WithinTX(ctx, b.client, func(ctx context.Context) (*string, error) { + tx := ent.TxFromContext(ctx) + + if err := tx.Occurrence.DeleteOneID(isOccurrenceID).Exec(ctx); err != nil { + return nil, errors.Wrap(err, "failed to delete isOccurrenceID with error") + } + return nil, nil + }) + if txErr != nil { + return txErr + } + return nil +} + func (b *EntBackend) IngestOccurrences(ctx context.Context, subjects model.PackageOrSourceInputs, artifacts []*model.IDorArtifactInput, occurrences []*model.IsOccurrenceInputSpec) ([]string, error) { funcName := "IngestOccurrences" ids, txErr := WithinTX(ctx, b.client, func(ctx context.Context) (*[]string, error) { diff --git a/pkg/assembler/backends/ent/backend/sbom.go b/pkg/assembler/backends/ent/backend/sbom.go index 9f06225641..8dd0ce3d1b 100644 --- a/pkg/assembler/backends/ent/backend/sbom.go +++ b/pkg/assembler/backends/ent/backend/sbom.go @@ -373,6 +373,214 @@ func getSBOMObjectWithIncludes(q *ent.BillOfMaterialsQuery) *ent.BillOfMaterials { }) } +// getHasSBOMForDeletion is used by the delete function to query for isDependency and isOccurrence IDs to cascade delete +func (b *EntBackend) getHasSBOMForDeletion(ctx context.Context, hasSBOMID *string, after *string, first *int) (*model.HasSBOMConnection, error) { + var afterCursor *entgql.Cursor[uuid.UUID] + + if after != nil { + globalID := fromGlobalID(*after) + afterUUID, err := uuid.Parse(globalID.id) + if err != nil { + return nil, err + } + afterCursor = &ent.Cursor{ID: afterUUID} + } else { + afterCursor = nil + } + + sbomQuery := b.client.BillOfMaterials.Query(). + Where([]predicate.BillOfMaterials{optionalPredicate(hasSBOMID, IDEQ)}...) 
+ + hasSBOMConnection, err := getSBOMObjectWithOutIncludes(sbomQuery).Paginate(ctx, afterCursor, first, nil, nil) + if err != nil { + return nil, fmt.Errorf("failed hasSBOM query with error: %w", err) + } + + // Large SBOMs (50MB+) hit the postgres parameter issue (HasSBOM: pq: got 97137 parameters but PostgreSQL only supports 65535 parameters). + // To overcome this, we can breakout the "included" pieces of the hasSBOM node into individual queries and reconstruct the node at the end. + + reconstructedSBOMs := map[string]*model.HasSbom{} + includedFirst := 60000 + + type depResult struct { + deps []*ent.Dependency + depErr error + } + + type occurResult struct { + occurs []*ent.Occurrence + occurErr error + } + + for _, foundSBOM := range hasSBOMConnection.Edges { + + var includedDeps []*ent.Dependency + var includedOccurs []*ent.Occurrence + + depsChan := make(chan depResult, 1) + occursChan := make(chan occurResult, 1) + + sbomID := foundSBOM.Cursor.ID.String() + + // query included dependencies but only return IDs + go func(ctx context.Context, b *EntBackend, sbomID string, first int, artChan chan<- depResult) { + var afterCursor *entgql.Cursor[uuid.UUID] + defer close(depsChan) + for { + isDepQuery := b.client.Dependency.Query(). + Where(dependency.HasIncludedInSbomsWith([]predicate.BillOfMaterials{ + optionalPredicate(&sbomID, IDEQ)}...)) + + depConnect, err := getIsDepObjectWithoutEdges(isDepQuery). 
+ Paginate(ctx, afterCursor, &first, nil, nil) + if err != nil { + depsChan <- depResult{deps: nil, + depErr: fmt.Errorf("failed included dependency query for hasSBOM with error: %w", err)} + } + + var paginatedDeps []*ent.Dependency + + for _, edge := range depConnect.Edges { + paginatedDeps = append(paginatedDeps, edge.Node) + } + + depsChan <- depResult{deps: paginatedDeps, + depErr: nil} + + if !depConnect.PageInfo.HasNextPage { + break + } + afterCursor = depConnect.PageInfo.EndCursor + } + }(ctx, b, sbomID, includedFirst, depsChan) + + // query included occurrences + go func(ctx context.Context, b *EntBackend, sbomID string, first int, occursChan chan<- occurResult) { + var afterCursor *entgql.Cursor[uuid.UUID] + defer close(occursChan) + for { + occurQuery := b.client.Occurrence.Query(). + Where(occurrence.HasIncludedInSbomsWith([]predicate.BillOfMaterials{ + optionalPredicate(&sbomID, IDEQ)}...)) + + occurConnect, err := getOccurrenceObjectWithoutEdges(occurQuery). + Paginate(ctx, afterCursor, &first, nil, nil) + if err != nil { + occursChan <- occurResult{occurs: nil, + occurErr: fmt.Errorf("failed included occurrence query for hasSBOM with error: %w", err)} + } + + var paginatedOccurs []*ent.Occurrence + + for _, edge := range occurConnect.Edges { + paginatedOccurs = append(paginatedOccurs, edge.Node) + } + + occursChan <- occurResult{occurs: paginatedOccurs, + occurErr: nil} + + if !occurConnect.PageInfo.HasNextPage { + break + } + afterCursor = occurConnect.PageInfo.EndCursor + } + }(ctx, b, sbomID, includedFirst, occursChan) + + for occur := range occursChan { + if occur.occurErr != nil { + return nil, fmt.Errorf("occurrence channel failure: %w", occur.occurErr) + } + includedOccurs = append(includedOccurs, occur.occurs...) + } + + for dep := range depsChan { + if dep.depErr != nil { + return nil, fmt.Errorf("dependency channel failure: %w", dep.depErr) + } + includedDeps = append(includedDeps, dep.deps...) 
+ } + reconstructedSBOM := toModelHasSBOMWithJustIncludedIDs(foundSBOM.Node, includedDeps, includedOccurs) + reconstructedSBOMs[sbomID] = reconstructedSBOM + } + + var edges []*model.HasSBOMEdge + for id, edge := range reconstructedSBOMs { + edges = append(edges, &model.HasSBOMEdge{ + Cursor: hasSBOMGlobalID(id), + Node: edge, + }) + } + + if hasSBOMConnection.PageInfo.StartCursor != nil { + return &model.HasSBOMConnection{ + TotalCount: hasSBOMConnection.TotalCount, + PageInfo: &model.PageInfo{ + HasNextPage: hasSBOMConnection.PageInfo.HasNextPage, + StartCursor: ptrfrom.String(hasSBOMGlobalID(hasSBOMConnection.PageInfo.StartCursor.ID.String())), + EndCursor: ptrfrom.String(hasSBOMGlobalID(hasSBOMConnection.PageInfo.EndCursor.ID.String())), + }, + Edges: edges, + }, nil + } else { + // if not found return nil + return nil, nil + } +} + +func (b *EntBackend) deleteHasSbom(ctx context.Context, hasSBOMID uuid.UUID) (bool, error) { + _, txErr := WithinTX(ctx, b.client, func(ctx context.Context) (*string, error) { + tx := ent.TxFromContext(ctx) + + first := 1 + hasSBOMIDString := hasSBOMID.String() + hasSBOMConnection, err := b.getHasSBOMForDeletion(ctx, &hasSBOMIDString, nil, &first) + if err != nil { + return nil, errors.Wrap(err, "failed to query for hasSBOM node for deletion with error") + } + + // first delete isDependency and isOccurrence nodes that are part of the hasSBOM node + for _, hasSBOMNEdge := range hasSBOMConnection.Edges { + for _, isDep := range hasSBOMNEdge.Node.IncludedDependencies { + foundGlobalID := fromGlobalID(isDep.ID) + if foundGlobalID.nodeType == "" { + return nil, fmt.Errorf("failed to parse globalID %s. 
Missing Node Type", isDep.ID) + } + // return uuid if valid, else error + isDepID, err := uuid.Parse(foundGlobalID.id) + if err != nil { + return nil, fmt.Errorf("isDependency uuid conversion from string failed with error: %w", err) + } + if err := b.deleteIsDependency(ctx, isDepID); err != nil { + return nil, fmt.Errorf("failed to delete isDependency with error: %w", err) + } + } + for _, isOccur := range hasSBOMNEdge.Node.IncludedOccurrences { + foundGlobalID := fromGlobalID(isOccur.ID) + if foundGlobalID.nodeType == "" { + return nil, fmt.Errorf("failed to parse globalID %s. Missing Node Type", isOccur.ID) + } + // return uuid if valid, else error + isOccurID, err := uuid.Parse(foundGlobalID.id) + if err != nil { + return nil, fmt.Errorf("isOccurrence uuid conversion from string failed with error: %w", err) + } + if err := b.deleteIsOccurrences(ctx, isOccurID); err != nil { + return nil, fmt.Errorf("failed to delete isOccurrence with error: %w", err) + } + } + } + // delete hasSBOM node + if err := tx.BillOfMaterials.DeleteOneID(hasSBOMID).Exec(ctx); err != nil { + return nil, errors.Wrap(err, "failed to delete hasSBOM with error") + } + return nil, nil + }) + if txErr != nil { + return false, txErr + } + return true, nil +} + func (b *EntBackend) IngestHasSbom(ctx context.Context, subject model.PackageOrArtifactInput, spec model.HasSBOMInputSpec, includes model.HasSBOMIncludesInputSpec) (string, error) { funcName := "IngestHasSbom" diff --git a/pkg/assembler/backends/ent/backend/slsa.go b/pkg/assembler/backends/ent/backend/slsa.go index 7ea24bfdd5..1ec359b7f3 100644 --- a/pkg/assembler/backends/ent/backend/slsa.go +++ b/pkg/assembler/backends/ent/backend/slsa.go @@ -145,6 +145,21 @@ func getSLSAObject(q *ent.SLSAAttestationQuery) *ent.SLSAAttestationQuery { WithBuiltFrom() } +func (b *EntBackend) deleteSLSA(ctx context.Context, SLSAID uuid.UUID) (bool, error) { + _, txErr := WithinTX(ctx, b.client, func(ctx context.Context) (*string, error) { + tx := 
ent.TxFromContext(ctx) + + if err := tx.SLSAAttestation.DeleteOneID(SLSAID).Exec(ctx); err != nil { + return nil, errors.Wrap(err, "failed to delete hasSLSA with error") + } + return nil, nil + }) + if txErr != nil { + return false, txErr + } + return true, nil +} + func (b *EntBackend) IngestSLSA(ctx context.Context, subject model.IDorArtifactInput, builtFrom []*model.IDorArtifactInput, builtBy model.IDorBuilderInput, slsa model.SLSAInputSpec) (string, error) { id, txErr := WithinTX(ctx, b.client, func(ctx context.Context) (*string, error) { return upsertSLSA(ctx, ent.TxFromContext(ctx), subject, builtFrom, builtBy, slsa) diff --git a/pkg/assembler/backends/ent/backend/transforms.go b/pkg/assembler/backends/ent/backend/transforms.go index 33bdad293c..58344f6294 100644 --- a/pkg/assembler/backends/ent/backend/transforms.go +++ b/pkg/assembler/backends/ent/backend/transforms.go @@ -173,55 +173,49 @@ func valueOrDefault[T any](v *T, def T) T { return *v } -func toModelIsOccurrenceWithSubject(o *ent.Occurrence) *model.IsOccurrence { - return &model.IsOccurrence{ - ID: occurrenceGlobalID(o.ID.String()), - Subject: toModelPackageOrSource(o.Edges.Package, o.Edges.Source), - Artifact: toModelArtifact(o.Edges.Artifact), - Justification: o.Justification, - Origin: o.Origin, - Collector: o.Collector, - DocumentRef: o.DocumentRef, - } +func toModelIsOccurrenceWithSubject(id *ent.Occurrence) *model.IsOccurrence { + return toModelIsOccurrence(id, true) +} + +func toModelIsOccurrenceWithoutSubject(id *ent.Occurrence) *model.IsOccurrence { + return toModelIsOccurrence(id, false) } -//func toModelIsOccurrence(o *ent.Occurrence, sub model.PackageOrSource) *model.IsOccurrence { -// return &model.IsOccurrence{ -// ID: nodeID(o.ID), -// Subject: sub, -// Artifact: toModelArtifact(o.Edges.Artifact), -// Justification: o.Justification, -// Origin: o.Origin, -// Collector: o.Collector, -// } -//} - -//func pkgQualifierInputSpecToQuerySpec(input []*model.PackageQualifierInputSpec) 
[]*model.PackageQualifierSpec { -// if input == nil { -// return nil -// } -// out := make([]*model.PackageQualifierSpec, len(input)) -// for i, in := range input { -// out[i] = &model.PackageQualifierSpec{ -// Key: in.Key, -// Value: &in.Value, -// } -// } -// return out -//} +func toModelIsOccurrence(o *ent.Occurrence, backrefs bool) *model.IsOccurrence { + if backrefs { + return &model.IsOccurrence{ + ID: occurrenceGlobalID(o.ID.String()), + Subject: toModelPackageOrSource(o.Edges.Package, o.Edges.Source), + Artifact: toModelArtifact(o.Edges.Artifact), + Justification: o.Justification, + Origin: o.Origin, + Collector: o.Collector, + DocumentRef: o.DocumentRef, + } + } else { + return &model.IsOccurrence{ + ID: occurrenceGlobalID(o.ID.String()), + Justification: o.Justification, + Origin: o.Origin, + Collector: o.Collector, + DocumentRef: o.DocumentRef, + } + } +} func toModelIsDependencyWithBackrefs(id *ent.Dependency) *model.IsDependency { return toModelIsDependency(id, true) } -//func toModelIsDependencyWithoutBackrefs(id *ent.Dependency) *model.IsDependency { -// return toModelIsDependency(id, false) -//} +func toModelIsDependencyWithoutBackrefs(id *ent.Dependency) *model.IsDependency { + return toModelIsDependency(id, false) +} func toModelIsDependency(id *ent.Dependency, backrefs bool) *model.IsDependency { - var pkg *model.Package - var depPkg *model.Package + if backrefs { + var pkg *model.Package + var depPkg *model.Package pkg = toModelPackage(backReferencePackageVersion(id.Edges.Package)) if id.Edges.DependentPackageName != nil { depPkg = toModelPackage(backReferencePackageName(id.Edges.DependentPackageName)) @@ -230,28 +224,28 @@ func toModelIsDependency(id *ent.Dependency, backrefs bool) *model.IsDependency } else { depPkg = toModelPackage(backReferencePackageVersion(id.Edges.DependentPackageVersion)) } + return &model.IsDependency{ + ID: dependencyGlobalID(id.ID.String()), + Package: pkg, + DependencyPackage: depPkg, + VersionRange: id.VersionRange, 
+ DependencyType: dependencyTypeFromEnum(id.DependencyType), + Justification: id.Justification, + Origin: id.Origin, + Collector: id.Collector, + DocumentRef: id.DocumentRef, + } } else { - pkg = toModelPackage(id.Edges.Package.Edges.Name) - if id.Edges.DependentPackageName != nil { - depPkg = toModelPackage(id.Edges.DependentPackageName) - // in this case, the expected response is package name with an empty package version array - depPkg.Namespaces[0].Names[0].Versions = []*model.PackageVersion{} - } else { - depPkg = toModelPackage(id.Edges.DependentPackageVersion.Edges.Name) + return &model.IsDependency{ + ID: dependencyGlobalID(id.ID.String()), + VersionRange: id.VersionRange, + DependencyType: dependencyTypeFromEnum(id.DependencyType), + Justification: id.Justification, + Origin: id.Origin, + Collector: id.Collector, + DocumentRef: id.DocumentRef, } } - - return &model.IsDependency{ - ID: dependencyGlobalID(id.ID.String()), - Package: pkg, - DependencyPackage: depPkg, - VersionRange: id.VersionRange, - DependencyType: dependencyTypeFromEnum(id.DependencyType), - Justification: id.Justification, - Origin: id.Origin, - Collector: id.Collector, - DocumentRef: id.DocumentRef, - } } func dependencyTypeFromEnum(t dependency.DependencyType) model.DependencyType { @@ -284,6 +278,26 @@ func toModelHasSBOMWithIncluded(sbom *ent.BillOfMaterials, includedSoftwarePacka IncludedOccurrences: collect(includedOccurrences, toModelIsOccurrenceWithSubject), } } + +func toModelHasSBOMWithJustIncludedIDs(sbom *ent.BillOfMaterials, includedDependencies []*ent.Dependency, includedOccurrences []*ent.Occurrence) *model.HasSbom { + + return &model.HasSbom{ + ID: hasSBOMGlobalID(sbom.ID.String()), + Subject: toPackageOrArtifact(sbom.Edges.Package, sbom.Edges.Artifact), + URI: sbom.URI, + Algorithm: sbom.Algorithm, + Digest: sbom.Digest, + DownloadLocation: sbom.DownloadLocation, + Origin: sbom.Origin, + Collector: sbom.Collector, + DocumentRef: sbom.DocumentRef, + KnownSince: 
sbom.KnownSince, + IncludedSoftware: nil, + IncludedDependencies: collect(includedDependencies, toModelIsDependencyWithoutBackrefs), + IncludedOccurrences: collect(includedOccurrences, toModelIsOccurrenceWithoutSubject), + } +} + func toModelHasSBOM(sbom *ent.BillOfMaterials) *model.HasSbom { return &model.HasSbom{ ID: hasSBOMGlobalID(sbom.ID.String()), diff --git a/pkg/assembler/backends/ent/migrate/migrations/20240626153721_ent_diff.sql b/pkg/assembler/backends/ent/migrate/migrations/20240626153721_ent_diff.sql new file mode 100644 index 0000000000..c1c7758220 --- /dev/null +++ b/pkg/assembler/backends/ent/migrate/migrations/20240626153721_ent_diff.sql @@ -0,0 +1,10 @@ +-- Modify "bill_of_materials" table +ALTER TABLE "bill_of_materials" DROP CONSTRAINT "bill_of_materials_artifacts_artifact", DROP CONSTRAINT "bill_of_materials_package_versions_package", ADD CONSTRAINT "bill_of_materials_artifacts_artifact" FOREIGN KEY ("artifact_id") REFERENCES "artifacts" ("id") ON UPDATE NO ACTION ON DELETE CASCADE, ADD CONSTRAINT "bill_of_materials_package_versions_package" FOREIGN KEY ("package_id") REFERENCES "package_versions" ("id") ON UPDATE NO ACTION ON DELETE CASCADE; +-- Modify "certify_vulns" table +ALTER TABLE "certify_vulns" DROP CONSTRAINT "certify_vulns_package_versions_package", DROP CONSTRAINT "certify_vulns_vulnerability_ids_vulnerability", ADD CONSTRAINT "certify_vulns_package_versions_package" FOREIGN KEY ("package_id") REFERENCES "package_versions" ("id") ON UPDATE NO ACTION ON DELETE CASCADE, ADD CONSTRAINT "certify_vulns_vulnerability_ids_vulnerability" FOREIGN KEY ("vulnerability_id") REFERENCES "vulnerability_ids" ("id") ON UPDATE NO ACTION ON DELETE CASCADE; +-- Modify "dependencies" table +ALTER TABLE "dependencies" DROP CONSTRAINT "dependencies_package_names_dependent_package_name", DROP CONSTRAINT "dependencies_package_versions_dependent_package_version", DROP CONSTRAINT "dependencies_package_versions_package", ADD CONSTRAINT 
"dependencies_package_names_dependent_package_name" FOREIGN KEY ("dependent_package_name_id") REFERENCES "package_names" ("id") ON UPDATE NO ACTION ON DELETE CASCADE, ADD CONSTRAINT "dependencies_package_versions_dependent_package_version" FOREIGN KEY ("dependent_package_version_id") REFERENCES "package_versions" ("id") ON UPDATE NO ACTION ON DELETE CASCADE, ADD CONSTRAINT "dependencies_package_versions_package" FOREIGN KEY ("package_id") REFERENCES "package_versions" ("id") ON UPDATE NO ACTION ON DELETE CASCADE; +-- Modify "occurrences" table +ALTER TABLE "occurrences" DROP CONSTRAINT "occurrences_artifacts_artifact", DROP CONSTRAINT "occurrences_package_versions_package", DROP CONSTRAINT "occurrences_source_names_source", ADD CONSTRAINT "occurrences_artifacts_artifact" FOREIGN KEY ("artifact_id") REFERENCES "artifacts" ("id") ON UPDATE NO ACTION ON DELETE CASCADE, ADD CONSTRAINT "occurrences_package_versions_package" FOREIGN KEY ("package_id") REFERENCES "package_versions" ("id") ON UPDATE NO ACTION ON DELETE CASCADE, ADD CONSTRAINT "occurrences_source_names_source" FOREIGN KEY ("source_id") REFERENCES "source_names" ("id") ON UPDATE NO ACTION ON DELETE CASCADE; +-- Modify "slsa_attestations" table +ALTER TABLE "slsa_attestations" DROP CONSTRAINT "slsa_attestations_artifacts_subject", DROP CONSTRAINT "slsa_attestations_builders_built_by", ADD CONSTRAINT "slsa_attestations_artifacts_subject" FOREIGN KEY ("subject_id") REFERENCES "artifacts" ("id") ON UPDATE NO ACTION ON DELETE CASCADE, ADD CONSTRAINT "slsa_attestations_builders_built_by" FOREIGN KEY ("built_by_id") REFERENCES "builders" ("id") ON UPDATE NO ACTION ON DELETE CASCADE; diff --git a/pkg/assembler/backends/ent/migrate/migrations/atlas.sum b/pkg/assembler/backends/ent/migrate/migrations/atlas.sum index 999ec13efa..6e40b39c27 100644 --- a/pkg/assembler/backends/ent/migrate/migrations/atlas.sum +++ b/pkg/assembler/backends/ent/migrate/migrations/atlas.sum @@ -1,2 +1,3 @@ 
-h1:2FEz7GgciNWnEenC+a5I4fVLkWd6xlS1ze/Z/CG+j2w= +h1:Z0P5t/epO2WlS9l+KgcJj/fXXUbYqg9ASBTrDwiUVQ4= 20240503123155_baseline.sql h1:oZtbKI8sJj3xQq7ibfvfhFoVl+Oa67CWP7DFrsVLVds= +20240626153721_ent_diff.sql h1:FvV1xELikdPbtJk7kxIZn9MhvVVoFLF/2/iT/wM5RkA= diff --git a/pkg/assembler/backends/ent/migrate/schema.go b/pkg/assembler/backends/ent/migrate/schema.go index 8c88a888b6..f3fccd19b8 100644 --- a/pkg/assembler/backends/ent/migrate/schema.go +++ b/pkg/assembler/backends/ent/migrate/schema.go @@ -56,13 +56,13 @@ var ( Symbol: "bill_of_materials_package_versions_package", Columns: []*schema.Column{BillOfMaterialsColumns[13]}, RefColumns: []*schema.Column{PackageVersionsColumns[0]}, - OnDelete: schema.SetNull, + OnDelete: schema.Cascade, }, { Symbol: "bill_of_materials_artifacts_artifact", Columns: []*schema.Column{BillOfMaterialsColumns[14]}, RefColumns: []*schema.Column{ArtifactsColumns[0]}, - OnDelete: schema.SetNull, + OnDelete: schema.Cascade, }, }, Indexes: []*schema.Index{ @@ -354,13 +354,13 @@ var ( Symbol: "certify_vulns_vulnerability_ids_vulnerability", Columns: []*schema.Column{CertifyVulnsColumns[9]}, RefColumns: []*schema.Column{VulnerabilityIdsColumns[0]}, - OnDelete: schema.NoAction, + OnDelete: schema.Cascade, }, { Symbol: "certify_vulns_package_versions_package", Columns: []*schema.Column{CertifyVulnsColumns[10]}, RefColumns: []*schema.Column{PackageVersionsColumns[0]}, - OnDelete: schema.NoAction, + OnDelete: schema.Cascade, }, }, Indexes: []*schema.Index{ @@ -394,19 +394,19 @@ var ( Symbol: "dependencies_package_versions_package", Columns: []*schema.Column{DependenciesColumns[7]}, RefColumns: []*schema.Column{PackageVersionsColumns[0]}, - OnDelete: schema.NoAction, + OnDelete: schema.Cascade, }, { Symbol: "dependencies_package_names_dependent_package_name", Columns: []*schema.Column{DependenciesColumns[8]}, RefColumns: []*schema.Column{PackageNamesColumns[0]}, - OnDelete: schema.SetNull, + OnDelete: schema.Cascade, }, { Symbol: 
"dependencies_package_versions_dependent_package_version", Columns: []*schema.Column{DependenciesColumns[9]}, RefColumns: []*schema.Column{PackageVersionsColumns[0]}, - OnDelete: schema.SetNull, + OnDelete: schema.Cascade, }, }, Indexes: []*schema.Index{ @@ -644,19 +644,19 @@ var ( Symbol: "occurrences_artifacts_artifact", Columns: []*schema.Column{OccurrencesColumns[5]}, RefColumns: []*schema.Column{ArtifactsColumns[0]}, - OnDelete: schema.NoAction, + OnDelete: schema.Cascade, }, { Symbol: "occurrences_package_versions_package", Columns: []*schema.Column{OccurrencesColumns[6]}, RefColumns: []*schema.Column{PackageVersionsColumns[0]}, - OnDelete: schema.SetNull, + OnDelete: schema.Cascade, }, { Symbol: "occurrences_source_names_source", Columns: []*schema.Column{OccurrencesColumns[7]}, RefColumns: []*schema.Column{SourceNamesColumns[0]}, - OnDelete: schema.SetNull, + OnDelete: schema.Cascade, }, }, Indexes: []*schema.Index{ @@ -887,13 +887,13 @@ var ( Symbol: "slsa_attestations_builders_built_by", Columns: []*schema.Column{SlsaAttestationsColumns[10]}, RefColumns: []*schema.Column{BuildersColumns[0]}, - OnDelete: schema.NoAction, + OnDelete: schema.Cascade, }, { Symbol: "slsa_attestations_artifacts_subject", Columns: []*schema.Column{SlsaAttestationsColumns[11]}, RefColumns: []*schema.Column{ArtifactsColumns[0]}, - OnDelete: schema.NoAction, + OnDelete: schema.Cascade, }, }, Indexes: []*schema.Index{ @@ -1272,16 +1272,21 @@ func init() { VulnerabilityMetadataTable.ForeignKeys[0].RefTable = VulnerabilityIdsTable BillOfMaterialsIncludedSoftwarePackagesTable.ForeignKeys[0].RefTable = BillOfMaterialsTable BillOfMaterialsIncludedSoftwarePackagesTable.ForeignKeys[1].RefTable = PackageVersionsTable + BillOfMaterialsIncludedSoftwarePackagesTable.Annotation = &entsql.Annotation{} BillOfMaterialsIncludedSoftwareArtifactsTable.ForeignKeys[0].RefTable = BillOfMaterialsTable BillOfMaterialsIncludedSoftwareArtifactsTable.ForeignKeys[1].RefTable = ArtifactsTable + 
BillOfMaterialsIncludedSoftwareArtifactsTable.Annotation = &entsql.Annotation{} BillOfMaterialsIncludedDependenciesTable.ForeignKeys[0].RefTable = BillOfMaterialsTable BillOfMaterialsIncludedDependenciesTable.ForeignKeys[1].RefTable = DependenciesTable + BillOfMaterialsIncludedDependenciesTable.Annotation = &entsql.Annotation{} BillOfMaterialsIncludedOccurrencesTable.ForeignKeys[0].RefTable = BillOfMaterialsTable BillOfMaterialsIncludedOccurrencesTable.ForeignKeys[1].RefTable = OccurrencesTable + BillOfMaterialsIncludedOccurrencesTable.Annotation = &entsql.Annotation{} CertifyLegalDeclaredLicensesTable.ForeignKeys[0].RefTable = CertifyLegalsTable CertifyLegalDeclaredLicensesTable.ForeignKeys[1].RefTable = LicensesTable CertifyLegalDiscoveredLicensesTable.ForeignKeys[0].RefTable = CertifyLegalsTable CertifyLegalDiscoveredLicensesTable.ForeignKeys[1].RefTable = LicensesTable SlsaAttestationBuiltFromTable.ForeignKeys[0].RefTable = SlsaAttestationsTable SlsaAttestationBuiltFromTable.ForeignKeys[1].RefTable = ArtifactsTable + SlsaAttestationBuiltFromTable.Annotation = &entsql.Annotation{} } diff --git a/pkg/assembler/backends/ent/schema/billofmaterials.go b/pkg/assembler/backends/ent/schema/billofmaterials.go index 11a9f414c4..f23141047a 100644 --- a/pkg/assembler/backends/ent/schema/billofmaterials.go +++ b/pkg/assembler/backends/ent/schema/billofmaterials.go @@ -56,12 +56,12 @@ func (BillOfMaterials) Fields() []ent.Field { // Edges of the Material. 
func (BillOfMaterials) Edges() []ent.Edge { return []ent.Edge{ - edge.To("package", PackageVersion.Type).Field("package_id").Unique(), - edge.To("artifact", Artifact.Type).Field("artifact_id").Unique(), - edge.To("included_software_packages", PackageVersion.Type), - edge.To("included_software_artifacts", Artifact.Type), - edge.To("included_dependencies", Dependency.Type), - edge.To("included_occurrences", Occurrence.Type), + edge.To("package", PackageVersion.Type).Field("package_id").Unique().Annotations(entsql.OnDelete(entsql.Cascade)), + edge.To("artifact", Artifact.Type).Field("artifact_id").Unique().Annotations(entsql.OnDelete(entsql.Cascade)), + edge.To("included_software_packages", PackageVersion.Type).Annotations(entsql.OnDelete(entsql.Cascade)), + edge.To("included_software_artifacts", Artifact.Type).Annotations(entsql.OnDelete(entsql.Cascade)), + edge.To("included_dependencies", Dependency.Type).Annotations(entsql.OnDelete(entsql.Cascade)), + edge.To("included_occurrences", Occurrence.Type).Annotations(entsql.OnDelete(entsql.Cascade)), } } diff --git a/pkg/assembler/backends/ent/schema/certifyvuln.go b/pkg/assembler/backends/ent/schema/certifyvuln.go index d8dc075125..ef6c852d60 100644 --- a/pkg/assembler/backends/ent/schema/certifyvuln.go +++ b/pkg/assembler/backends/ent/schema/certifyvuln.go @@ -17,6 +17,7 @@ package schema import ( "entgo.io/ent" + "entgo.io/ent/dialect/entsql" "entgo.io/ent/schema/edge" "entgo.io/ent/schema/field" "entgo.io/ent/schema/index" @@ -51,10 +52,8 @@ func (CertifyVuln) Fields() []ent.Field { // Edges of the Vulnerability. 
func (CertifyVuln) Edges() []ent.Edge { return []ent.Edge{ - // edge.To("vulnerability", VulnerabilityID.Type).Unique().Field("vulnerability_id").Required().Annotations(entsql.OnDelete(entsql.Cascade)), - // edge.To("package", PackageVersion.Type).Unique().Field("package_id").Required().Annotations(entsql.OnDelete(entsql.Cascade)), - edge.To("vulnerability", VulnerabilityID.Type).Unique().Field("vulnerability_id").Required(), - edge.To("package", PackageVersion.Type).Unique().Field("package_id").Required(), + edge.To("vulnerability", VulnerabilityID.Type).Unique().Field("vulnerability_id").Required().Annotations(entsql.OnDelete(entsql.Cascade)), + edge.To("package", PackageVersion.Type).Unique().Field("package_id").Required().Annotations(entsql.OnDelete(entsql.Cascade)), } } diff --git a/pkg/assembler/backends/ent/schema/dependency.go b/pkg/assembler/backends/ent/schema/dependency.go index 6377b470cc..fcf924fd7b 100644 --- a/pkg/assembler/backends/ent/schema/dependency.go +++ b/pkg/assembler/backends/ent/schema/dependency.go @@ -63,14 +63,14 @@ func (Dependency) Edges() []ent.Edge { edge.To("package", PackageVersion.Type). Required(). Field("package_id"). - Unique(), + Unique().Annotations(entsql.OnDelete(entsql.Cascade)), edge.To("dependent_package_name", PackageName.Type). Field("dependent_package_name_id"). - Unique(), + Unique().Annotations(entsql.OnDelete(entsql.Cascade)), edge.To("dependent_package_version", PackageVersion.Type). Field("dependent_package_version_id"). 
- Unique(), - edge.From("included_in_sboms", BillOfMaterials.Type).Ref("included_dependencies"), + Unique().Annotations(entsql.OnDelete(entsql.Cascade)), + edge.From("included_in_sboms", BillOfMaterials.Type).Ref("included_dependencies").Annotations(entsql.OnDelete(entsql.Cascade)), } } diff --git a/pkg/assembler/backends/ent/schema/occurrence.go b/pkg/assembler/backends/ent/schema/occurrence.go index f21b539ba4..b49fa727f3 100644 --- a/pkg/assembler/backends/ent/schema/occurrence.go +++ b/pkg/assembler/backends/ent/schema/occurrence.go @@ -55,10 +55,10 @@ func (Occurrence) Fields() []ent.Field { // Edges of the Occurrence. func (Occurrence) Edges() []ent.Edge { return []ent.Edge{ - edge.To("artifact", Artifact.Type).Field("artifact_id").Unique().Required(), - edge.To("package", PackageVersion.Type).Unique().Field("package_id"), - edge.To("source", SourceName.Type).Unique().Field("source_id"), - edge.From("included_in_sboms", BillOfMaterials.Type).Ref("included_occurrences"), + edge.To("artifact", Artifact.Type).Field("artifact_id").Unique().Required().Annotations(entsql.OnDelete(entsql.Cascade)), + edge.To("package", PackageVersion.Type).Unique().Field("package_id").Annotations(entsql.OnDelete(entsql.Cascade)), + edge.To("source", SourceName.Type).Unique().Field("source_id").Annotations(entsql.OnDelete(entsql.Cascade)), + edge.From("included_in_sboms", BillOfMaterials.Type).Ref("included_occurrences").Annotations(entsql.OnDelete(entsql.Cascade)), } } diff --git a/pkg/assembler/backends/ent/schema/slsaattestation.go b/pkg/assembler/backends/ent/schema/slsaattestation.go index 1fbd8c33e9..b36b5dbfa1 100644 --- a/pkg/assembler/backends/ent/schema/slsaattestation.go +++ b/pkg/assembler/backends/ent/schema/slsaattestation.go @@ -62,9 +62,9 @@ func (SLSAAttestation) Fields() []ent.Field { // Edges of the SLSA. 
func (SLSAAttestation) Edges() []ent.Edge { return []ent.Edge{ - edge.To("built_from", Artifact.Type), - edge.To("built_by", Builder.Type).Unique().Field("built_by_id").Required(), - edge.To("subject", Artifact.Type).Unique().Field("subject_id").Required(), + edge.To("built_from", Artifact.Type).Annotations(entsql.OnDelete(entsql.Cascade)), + edge.To("built_by", Builder.Type).Unique().Field("built_by_id").Required().Annotations(entsql.OnDelete(entsql.Cascade)), + edge.To("subject", Artifact.Type).Unique().Field("subject_id").Required().Annotations(entsql.OnDelete(entsql.Cascade)), } } diff --git a/pkg/assembler/backends/keyvalue/path.go b/pkg/assembler/backends/keyvalue/path.go index 8f943c86cd..073100955c 100644 --- a/pkg/assembler/backends/keyvalue/path.go +++ b/pkg/assembler/backends/keyvalue/path.go @@ -190,6 +190,8 @@ func (c *demoClient) Nodes(ctx context.Context, ids []string) ([]model.Node, err return rv, nil } +// Delete node and all associated relationships. This functionality is only implemented for +// certifyVuln, HasSBOM and HasSLSA. func (c *demoClient) Delete(ctx context.Context, node string) (bool, error) { panic(fmt.Errorf("not implemented: Delete")) } diff --git a/pkg/assembler/clients/generated/operations.go b/pkg/assembler/clients/generated/operations.go index 1d50ad4852..bd93da1e6e 100644 --- a/pkg/assembler/clients/generated/operations.go +++ b/pkg/assembler/clients/generated/operations.go @@ -9143,7 +9143,11 @@ const ( // DeleteResponse is returned by Delete on success. type DeleteResponse struct { - // Delete node with ID and all associated relationships + // Delete node with ID and all associated relationships. + // Deletion is only implemented for HasSBOM, HasSLSA and CertifyVuln + // for the time being. Other may be added based on usecase but these + // were chosen to ensure that users do not end up making breaking changes + // to their database. 
Delete bool `json:"delete"` } diff --git a/pkg/assembler/clients/operations/delete.graphql b/pkg/assembler/clients/operations/delete.graphql index 82a01ff156..c9cf361cf4 100644 --- a/pkg/assembler/clients/operations/delete.graphql +++ b/pkg/assembler/clients/operations/delete.graphql @@ -16,7 +16,11 @@ # NOTE: This is experimental and might change in the future! # Delete nodes based on ID (and all associated edges) +# Deletion is only implemented for HasSBOM, HasSLSA and CertifyVuln +# for the time being. Other may be added based on usecase but these +# were chosen to ensure that users do not end up making breaking changes +# to their database. mutation Delete($nodeID: ID!) { delete(node: $nodeID) -} \ No newline at end of file +} diff --git a/pkg/assembler/graphql/generated/root_.generated.go b/pkg/assembler/graphql/generated/root_.generated.go index 49824c9de8..200ed0ac10 100644 --- a/pkg/assembler/graphql/generated/root_.generated.go +++ b/pkg/assembler/graphql/generated/root_.generated.go @@ -5815,7 +5815,11 @@ extend type Mutation { extend type Mutation { """ - Delete node with ID and all associated relationships + Delete node with ID and all associated relationships. + Deletion is only implemented for HasSBOM, HasSLSA and CertifyVuln + for the time being. Other may be added based on usecase but these + were chosen to ensure that users do not end up making breaking changes + to their database. """ delete(node: ID!): Boolean! } diff --git a/pkg/assembler/graphql/schema/delete.graphql b/pkg/assembler/graphql/schema/delete.graphql index b4036f2f2e..134394c5fe 100644 --- a/pkg/assembler/graphql/schema/delete.graphql +++ b/pkg/assembler/graphql/schema/delete.graphql @@ -17,7 +17,11 @@ extend type Mutation { """ - Delete node with ID and all associated relationships + Delete node with ID and all associated relationships. + Deletion is only implemented for HasSBOM, HasSLSA and CertifyVuln + for the time being. 
Other may be added based on usecase but these + were chosen to ensure that users do not end up making breaking changes + to their database. """ delete(node: ID!): Boolean! } diff --git a/pkg/certifier/components/root_package/root_package.go b/pkg/certifier/components/root_package/root_package.go index 5d6d687680..f758586a57 100644 --- a/pkg/certifier/components/root_package/root_package.go +++ b/pkg/certifier/components/root_package/root_package.go @@ -169,12 +169,6 @@ func (p *packageQuery) getPackageNodes(ctx context.Context, nodeChan chan<- *Pac if math.Abs(difference.Hours()) < float64(p.daysSinceLastScan*24) { certifyVulnFound = true } - if math.Abs(difference.Hours()) > float64(p.daysSinceLastScan*24) { - _, err := generated.Delete(ctx, p.client, vulns.Id) - if err != nil { - return fmt.Errorf("failed to delete certifyVuln node with ID: %s, with error: %w", vulns.Id, err) - } - } } else { certifyVulnFound = true break