From 9deb1f639a73acb22662eef85e2ee61b5a55ba77 Mon Sep 17 00:00:00 2001 From: Fabian Engelniederhammer Date: Fri, 20 Sep 2024 11:00:40 +0200 Subject: [PATCH 01/20] test(backend): test `/get-original-metadata` locked during new submissions This is a follow-up to #2846 --- .../controller/SubmissionController.kt | 4 +-- .../org/loculus/backend/model/SubmitModel.kt | 12 +++++++ .../submission/SubmissionDatabaseService.kt | 8 ----- .../submission/UploadDatabaseService.kt | 1 - .../GetOriginalMetadataEndpointTest.kt | 32 +++++++++++++++++++ 5 files changed, 46 insertions(+), 11 deletions(-) diff --git a/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionController.kt b/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionController.kt index 5b1c16cb3..979422d86 100644 --- a/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionController.kt +++ b/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionController.kt @@ -352,7 +352,7 @@ open class SubmissionController( ) @ApiResponse( responseCode = "423", - description = "Locked. The metadata is currently being processed.", + description = "Locked. New sequence entries are currently being uploaded.", ) @GetMapping("/get-original-metadata", produces = [MediaType.APPLICATION_JSON_VALUE]) fun getOriginalMetadata( @@ -375,7 +375,7 @@ open class SubmissionController( headers.add(HttpHeaders.CONTENT_ENCODING, compression.compressionName) } - val stillProcessing = submissionDatabaseService.checkIfStillProcessingSubmittedData() + val stillProcessing = submitModel.checkIfStillProcessingSubmittedData() if (stillProcessing) { return ResponseEntity.status(HttpStatus.LOCKED).build() } diff --git a/backend/src/main/kotlin/org/loculus/backend/model/SubmitModel.kt b/backend/src/main/kotlin/org/loculus/backend/model/SubmitModel.kt index 4037ce14c..8b2105896 100644 --- a/backend/src/main/kotlin/org/loculus/backend/model/SubmitModel.kt +++ b/backend/src/main/kotlin/org/loculus/backend/model/SubmitModel.kt @@ -17,11 +17,14 @@ import org.loculus.backend.controller.UnprocessableEntityException import org.loculus.backend.service.datauseterms.DataUseTermsPreconditionValidator import org.loculus.backend.service.groupmanagement.GroupManagementPreconditionValidator import org.loculus.backend.service.submission.CompressionAlgorithm +import org.loculus.backend.service.submission.MetadataUploadAuxTable +import org.loculus.backend.service.submission.SequenceUploadAuxTable import org.loculus.backend.service.submission.UploadDatabaseService import org.loculus.backend.utils.FastaReader import org.loculus.backend.utils.metadataEntryStreamAsSequence import org.loculus.backend.utils.revisionEntryStreamAsSequence import org.springframework.stereotype.Service +import org.springframework.transaction.annotation.Transactional import org.springframework.web.multipart.MultipartFile import java.io.BufferedInputStream import java.io.File @@ -313,4 +316,13 @@ class SubmitModel( throw UnprocessableEntityException(metadataNotPresentErrorText + sequenceNotPresentErrorText) } } + + @Transactional(readOnly = true) + fun checkIfStillProcessingSubmittedData(): Boolean { + val metadataInAuxTable: Boolean = + MetadataUploadAuxTable.select(MetadataUploadAuxTable.submissionIdColumn).count() > 0 + val sequencesInAuxTable: Boolean = + SequenceUploadAuxTable.select(SequenceUploadAuxTable.sequenceSubmissionIdColumn).count() > 0 + return metadataInAuxTable || sequencesInAuxTable + } } diff --git 
a/backend/src/main/kotlin/org/loculus/backend/service/submission/SubmissionDatabaseService.kt b/backend/src/main/kotlin/org/loculus/backend/service/submission/SubmissionDatabaseService.kt index 6a8dd98f4..cbbe342bc 100644 --- a/backend/src/main/kotlin/org/loculus/backend/service/submission/SubmissionDatabaseService.kt +++ b/backend/src/main/kotlin/org/loculus/backend/service/submission/SubmissionDatabaseService.kt @@ -950,14 +950,6 @@ open class SubmissionDatabaseService( ) } - fun checkIfStillProcessingSubmittedData(): Boolean { - val metadataInAuxTable: Boolean = - MetadataUploadAuxTable.select(MetadataUploadAuxTable.submissionIdColumn).count() > 0 - val sequencesInAuxTable: Boolean = - SequenceUploadAuxTable.select(SequenceUploadAuxTable.sequenceSubmissionIdColumn).count() > 0 - return metadataInAuxTable || sequencesInAuxTable - } - fun streamOriginalMetadata( authenticatedUser: AuthenticatedUser, organism: Organism, diff --git a/backend/src/main/kotlin/org/loculus/backend/service/submission/UploadDatabaseService.kt b/backend/src/main/kotlin/org/loculus/backend/service/submission/UploadDatabaseService.kt index 0d4ddd86d..f21ef89c9 100644 --- a/backend/src/main/kotlin/org/loculus/backend/service/submission/UploadDatabaseService.kt +++ b/backend/src/main/kotlin/org/loculus/backend/service/submission/UploadDatabaseService.kt @@ -7,7 +7,6 @@ import org.jetbrains.exposed.sql.VarCharColumnType import org.jetbrains.exposed.sql.and import org.jetbrains.exposed.sql.batchInsert import org.jetbrains.exposed.sql.deleteWhere -import org.jetbrains.exposed.sql.select import org.jetbrains.exposed.sql.selectAll import org.jetbrains.exposed.sql.statements.StatementType import org.jetbrains.exposed.sql.transactions.transaction diff --git a/backend/src/test/kotlin/org/loculus/backend/controller/submission/GetOriginalMetadataEndpointTest.kt b/backend/src/test/kotlin/org/loculus/backend/controller/submission/GetOriginalMetadataEndpointTest.kt index d0b0b4a9d..d6b6f4ad5 100644 --- a/backend/src/test/kotlin/org/loculus/backend/controller/submission/GetOriginalMetadataEndpointTest.kt +++ b/backend/src/test/kotlin/org/loculus/backend/controller/submission/GetOriginalMetadataEndpointTest.kt @@ -2,13 +2,18 @@ package org.loculus.backend.controller.submission import com.fasterxml.jackson.module.kotlin.readValue import com.github.luben.zstd.ZstdInputStream +import io.mockk.every +import io.mockk.mockk +import kotlinx.datetime.LocalDateTime import org.hamcrest.CoreMatchers.`is` import org.hamcrest.MatcherAssert.assertThat import org.hamcrest.Matchers.hasSize import org.hamcrest.Matchers.not import org.junit.jupiter.api.Test import org.loculus.backend.api.AccessionVersionOriginalMetadata +import org.loculus.backend.api.Organism import org.loculus.backend.api.Status +import org.loculus.backend.auth.AuthenticatedUser import org.loculus.backend.controller.DEFAULT_ORGANISM import org.loculus.backend.controller.EndpointTest import org.loculus.backend.controller.OTHER_ORGANISM @@ -18,6 +23,8 @@ import org.loculus.backend.controller.groupmanagement.GroupManagementControllerC import org.loculus.backend.controller.groupmanagement.andGetGroupId import org.loculus.backend.controller.jacksonObjectMapper import org.loculus.backend.controller.submission.SubmitFiles.DefaultFiles +import org.loculus.backend.service.submission.UploadDatabaseService +import org.loculus.backend.utils.MetadataEntry import org.springframework.beans.factory.annotation.Autowired import org.springframework.http.HttpHeaders import 
org.springframework.http.MediaType @@ -33,6 +40,7 @@ class GetOriginalMetadataEndpointTest( @Autowired val convenienceClient: SubmissionConvenienceClient, @Autowired val submissionControllerClient: SubmissionControllerClient, @Autowired val groupManagementClient: GroupManagementControllerClient, + @Autowired val uploadDatabaseService: UploadDatabaseService, ) { @Test fun `GIVEN invalid authorization token THEN returns 401 Unauthorized`() { @@ -148,4 +156,28 @@ class GetOriginalMetadataEndpointTest( val responseAccessionVersions = responseBody.map { it.displayAccessionVersion() }.toSet() assertThat(responseAccessionVersions, `is`(expectedAccessionVersions)) } + + @Test + fun `GIVEN there are sequences currently being uploaded THEN returns locked`() { + val uploadId = "upload id" + val mockUser = mockk<AuthenticatedUser>() + every { mockUser.username }.returns("username") + + uploadDatabaseService.batchInsertMetadataInAuxTable( + uploadId = uploadId, + authenticatedUser = mockUser, + groupId = 1, + submittedOrganism = Organism("organism"), + uploadedMetadataBatch = listOf(MetadataEntry("submission id", mapOf("key" to "value"))), + uploadedAt = LocalDateTime(2024, 1, 1, 1, 1, 1), + ) + + submissionControllerClient.getOriginalMetadata() + .andExpect(status().isLocked) + + uploadDatabaseService.deleteUploadData(uploadId) + + submissionControllerClient.getOriginalMetadata() + .andExpect(status().isOk) + } } From d38457bbb47877e4bef6902007a81b2e5597013c Mon Sep 17 00:00:00 2001 From: Cornelius Roemer Date: Fri, 20 Sep 2024 14:52:42 +0200 Subject: [PATCH 02/20] chore: make start_dev.sh maintainable by breaking lines (#2853) --- backend/start_dev.sh | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/backend/start_dev.sh b/backend/start_dev.sh index 76cfa5906..f228bf5e4 100755 --- a/backend/start_dev.sh +++ b/backend/start_dev.sh @@ -1,2 +1,17 @@ #!
/bin/sh -./gradlew bootRun --args="--spring.datasource.url=jdbc:postgresql://localhost:5432/loculus --spring.datasource.username=postgres --spring.datasource.password=unsecure --loculus.config.path=../website/tests/config/backend_config.json --loculus.debug-mode=true --spring.security.oauth2.resourceserver.jwt.jwk-set-uri=http://localhost:8083/realms/loculus/protocol/openid-connect/certs --keycloak.user=backend --keycloak.password=backend --keycloak.realm=loculus --keycloak.client=backend-client --keycloak.url=http://localhost:8083" + +args=$(printf "%s " \ + "--spring.datasource.url=jdbc:postgresql://localhost:5432/loculus" \ + "--spring.datasource.username=postgres" \ + "--spring.datasource.password=unsecure" \ + "--loculus.config.path=../website/tests/config/backend_config.json" \ + "--loculus.debug-mode=true" \ + "--spring.security.oauth2.resourceserver.jwt.jwk-set-uri=http://localhost:8083/realms/loculus/protocol/openid-connect/certs" \ + "--keycloak.user=backend" \ + "--keycloak.password=backend" \ + "--keycloak.realm=loculus" \ + "--keycloak.client=backend-client" \ + "--keycloak.url=http://localhost:8083" +) + +./gradlew bootRun --args="$args" From bf0e31ec9b45236c601bc2921bceb053e76aa1ca Mon Sep 17 00:00:00 2001 From: Theo Sanderson Date: Fri, 20 Sep 2024 15:06:30 +0100 Subject: [PATCH 03/20] feat(website): One tab for all protein sequences (#2851) --- .../SequenceContainer.spec.tsx | 4 +- .../SequenceDetailsPage/SequenceViewer.tsx | 2 +- .../SequencesContainer.tsx | 191 +++++++++++++----- .../tests/pages/sequences/accession.spec.ts | 2 +- .../tests/pages/sequences/sequences.page.ts | 19 +- 5 files changed, 157 insertions(+), 61 deletions(-) diff --git a/website/src/components/SequenceDetailsPage/SequenceContainer.spec.tsx b/website/src/components/SequenceDetailsPage/SequenceContainer.spec.tsx index 0d975b336..676891c17 100644 --- a/website/src/components/SequenceDetailsPage/SequenceContainer.spec.tsx +++ b/website/src/components/SequenceDetailsPage/SequenceContainer.spec.tsx @@ -63,7 +63,7 @@ describe('SequencesContainer', () => { click('Load sequences'); - click('Aligned'); + click('Aligned nucleotide sequence'); await waitFor(() => { expect( screen.getByText(singleSegmentSequence, { @@ -72,7 +72,7 @@ describe('SequencesContainer', () => { ).toBeVisible(); }); - click('Sequence'); + click('Nucleotide sequence'); await waitFor(() => { expect( screen.getByText(unalignedSingleSegmentSequence, { diff --git a/website/src/components/SequenceDetailsPage/SequenceViewer.tsx b/website/src/components/SequenceDetailsPage/SequenceViewer.tsx index fb5a15b96..8445ae704 100644 --- a/website/src/components/SequenceDetailsPage/SequenceViewer.tsx +++ b/website/src/components/SequenceDetailsPage/SequenceViewer.tsx @@ -47,7 +47,7 @@ export const SequencesViewer: FC = ({ const header = '>' + data.name + (sequenceType.name === 'main' ? '' : `_${sequenceType.name}`); return ( -
+
); diff --git a/website/src/components/SequenceDetailsPage/SequencesContainer.tsx b/website/src/components/SequenceDetailsPage/SequencesContainer.tsx index 979c371b4..34e8901d2 100644 --- a/website/src/components/SequenceDetailsPage/SequencesContainer.tsx +++ b/website/src/components/SequenceDetailsPage/SequencesContainer.tsx @@ -49,75 +49,118 @@ export const InnerSequencesContainer: FC = ({ } return ( - <> - - - - - + ); }; export const SequencesContainer = withQueryProvider(InnerSequencesContainer); -type NucleotideSequenceTabsProps = { +type SequenceTabsProps = { + organism: string; + accessionVersion: string; + clientConfig: ClientConfig; nucleotideSegmentNames: NucleotideSegmentNames; sequenceType: SequenceType; setType: Dispatch>; + genes: string[]; }; -const SequenceTabs: FC = ({ +const SequenceTabs: FC = ({ + organism, + accessionVersion, + clientConfig, nucleotideSegmentNames, genes, sequenceType, setType, -}) => ( - - - - {genes.map((gene) => ( - setType(geneSequence(gene))} - label={gene} - key={gene} - /> - ))} - -); +}) => { + const [activeTab, setActiveTab] = useState<'unaligned' | 'aligned' | 'gene'>('unaligned'); + + useEffect(() => { + if (isUnalignedSequence(sequenceType)) { + setActiveTab('unaligned'); + } else if (isAlignedSequence(sequenceType)) { + setActiveTab('aligned'); + } else if (isGeneSequence(sequenceType.name, sequenceType)) { + setActiveTab('gene'); + } + }, [sequenceType]); + + return ( + <> + + + + setActiveTab('gene')} + /> + + + {activeTab === 'gene' && } + {activeTab !== 'gene' || isGeneSequence(sequenceType.name, sequenceType) ? ( + + ) : ( +
+ )} +
+ + ); +}; + +type NucleotideSequenceTabsProps = { + nucleotideSegmentNames: NucleotideSegmentNames; + sequenceType: SequenceType; + setType: Dispatch>; + isActive: boolean; + setActiveTab: (tab: 'unaligned' | 'aligned' | 'gene') => void; +}; const UnalignedNucleotideSequenceTabs: FC = ({ nucleotideSegmentNames, sequenceType, setType, + isActive, + setActiveTab, }) => { if (!isMultiSegmented(nucleotideSegmentNames)) { const onlySegment = nucleotideSegmentNames[0]; return ( setType(unalignedSequenceSegment(onlySegment))} - label='Sequence' + isActive={isActive} + onClick={() => { + setType(unalignedSequenceSegment(onlySegment)); + setActiveTab('unaligned'); + }} + label='Nucleotide sequence' /> ); } @@ -127,8 +170,11 @@ const UnalignedNucleotideSequenceTabs: FC = ({ {nucleotideSegmentNames.map((segmentName) => ( setType(unalignedSequenceSegment(segmentName))} + isActive={isActive && isUnalignedSequence(sequenceType) && segmentName === sequenceType.name} + onClick={() => { + setType(unalignedSequenceSegment(segmentName)); + setActiveTab('unaligned'); + }} label={`${segmentName} (unaligned)`} /> ))} @@ -136,15 +182,24 @@ const UnalignedNucleotideSequenceTabs: FC = ({ ); }; -const AlignmentSequenceTabs: FC = ({ nucleotideSegmentNames, sequenceType, setType }) => { +const AlignmentSequenceTabs: FC = ({ + nucleotideSegmentNames, + sequenceType, + setType, + isActive, + setActiveTab, +}) => { if (!isMultiSegmented(nucleotideSegmentNames)) { const onlySegment = nucleotideSegmentNames[0]; return ( setType(alignedSequenceSegment(onlySegment))} - label='Aligned' + isActive={isActive} + onClick={() => { + setType(alignedSequenceSegment(onlySegment)); + setActiveTab('aligned'); + }} + label='Aligned nucleotide sequence' /> ); } @@ -154,8 +209,11 @@ const AlignmentSequenceTabs: FC = ({ nucleotideSegm {nucleotideSegmentNames.map((segmentName) => ( setType(alignedSequenceSegment(segmentName))} + isActive={isActive && isAlignedSequence(sequenceType) && segmentName === sequenceType.name} + onClick={() => { + setType(alignedSequenceSegment(segmentName)); + setActiveTab('aligned'); + }} label={`${segmentName} (aligned)`} /> ))} @@ -163,6 +221,35 @@ const AlignmentSequenceTabs: FC = ({ nucleotideSegm ); }; +type GeneDropdownProps = { + genes: string[]; + sequenceType: SequenceType; + setType: Dispatch>; +}; + +const GeneDropdown: FC = ({ genes, sequenceType, setType }) => { + const selectedGene = isGeneSequence(sequenceType.name, sequenceType) ? sequenceType.name : ''; + + return ( +
+ +
+ ); +}; + function isMultiSegmented(nucleotideSegmentNames: string[]) { return nucleotideSegmentNames.length > 1; } diff --git a/website/tests/pages/sequences/accession.spec.ts b/website/tests/pages/sequences/accession.spec.ts index 4a55836d8..70f815aa6 100644 --- a/website/tests/pages/sequences/accession.spec.ts +++ b/website/tests/pages/sequences/accession.spec.ts @@ -11,7 +11,7 @@ test.describe('The detailed sequence page', () => { await expect(sequencePage.page.getByText(testSequenceEntryData.orf1a)).not.toBeVisible(); await sequencePage.loadSequences(); - await sequencePage.clickORF1aButton(); + await sequencePage.selectORF1a(); await expect(sequencePage.page.getByText(testSequenceEntryData.orf1a, { exact: false })).toBeVisible(); }); diff --git a/website/tests/pages/sequences/sequences.page.ts b/website/tests/pages/sequences/sequences.page.ts index f9d0a6daf..8975b2fc0 100644 --- a/website/tests/pages/sequences/sequences.page.ts +++ b/website/tests/pages/sequences/sequences.page.ts @@ -12,11 +12,13 @@ export class SequencePage { private readonly loadButton: Locator; private readonly allVersions: Locator; - private readonly orf1aButton: Locator; + private readonly specificProteinTab: Locator; + private readonly geneDropdown: Locator; constructor(public readonly page: Page) { this.loadButton = this.page.getByRole('button', { name: 'Load sequences' }); - this.orf1aButton = this.page.getByRole('button', { name: 'ORF1a' }); + this.specificProteinTab = this.page.getByRole('button', { name: 'Aligned amino acid sequences' }); + this.geneDropdown = this.page.locator('select'); this.allVersions = this.page.getByRole('link', { name: `All versions`, }); @@ -40,8 +42,15 @@ export class SequencePage { await this.loadButton.click(); } - public async clickORF1aButton() { - await expect(this.orf1aButton).toBeVisible(); - await this.orf1aButton.click(); + public async selectSpecificProtein(proteinName: string) { + await expect(this.specificProteinTab).toBeVisible(); + await this.specificProteinTab.click(); + + await expect(this.geneDropdown).toBeVisible(); + await this.geneDropdown.selectOption(proteinName); + } + + public async selectORF1a() { + await this.selectSpecificProtein('ORF1a'); } } From 5975698e00d387c4d82914076612b02882a32344 Mon Sep 17 00:00:00 2001 From: Theo Sanderson Date: Sun, 22 Sep 2024 22:22:22 +0100 Subject: [PATCH 04/20] fix(silo-preprocessing): Fix interpolation syntax error (#2856) Accidentally introduced in "feat(silo-prepro): use hash instead of line count to test for equality (#1488)" (b0523ad9) when changing the interpolation. 
Co-authored-by: Thomas Sibley --- kubernetes/loculus/silo_import_job.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kubernetes/loculus/silo_import_job.sh b/kubernetes/loculus/silo_import_job.sh index 7f1a66590..36796c235 100755 --- a/kubernetes/loculus/silo_import_job.sh +++ b/kubernetes/loculus/silo_import_job.sh @@ -140,7 +140,7 @@ download_data() { exit 0 else echo "Hashes are unequal, deleting old input data dir" - rm -rf "$old_input_data_dir:?}" + rm -rf "$old_input_data_dir" fi fi else From 64ec017c9f1a6b283a9e4f30535a5f70d3ea8902 Mon Sep 17 00:00:00 2001 From: Cornelius Roemer Date: Mon, 23 Sep 2024 10:34:31 +0200 Subject: [PATCH 05/20] feat(ingest/backend): add and use 'x-total-record' header for /get-original-metadata endpoint (#2857) --- .../controller/SubmissionController.kt | 28 +++++++++++-- .../submission/SubmissionDatabaseService.kt | 41 +++++++++++++++++-- .../GetOriginalMetadataEndpointTest.kt | 9 +++- ingest/scripts/call_loculus.py | 33 +++++++++------ 4 files changed, 90 insertions(+), 21 deletions(-) diff --git a/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionController.kt b/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionController.kt index 979422d86..55ab97b55 100644 --- a/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionController.kt +++ b/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionController.kt @@ -349,6 +349,13 @@ open class SubmissionController( @ApiResponse( responseCode = "200", description = GET_ORIGINAL_METADATA_RESPONSE_DESCRIPTION, + headers = [ + Header( + name = "x-total-records", + description = "The total number of records sent in responseBody", + schema = Schema(type = "integer"), + ), + ], ) @ApiResponse( responseCode = "423", @@ -369,16 +376,29 @@ open class SubmissionController( @HiddenParam authenticatedUser: AuthenticatedUser, @RequestParam compression: CompressionFormat?, ): ResponseEntity { + val stillProcessing = submitModel.checkIfStillProcessingSubmittedData() + if (stillProcessing) { + return ResponseEntity.status(HttpStatus.LOCKED).build() + } + val headers = HttpHeaders() headers.contentType = MediaType.parseMediaType(MediaType.APPLICATION_NDJSON_VALUE) if (compression != null) { headers.add(HttpHeaders.CONTENT_ENCODING, compression.compressionName) } - val stillProcessing = submitModel.checkIfStillProcessingSubmittedData() - if (stillProcessing) { - return ResponseEntity.status(HttpStatus.LOCKED).build() - } + val totalRecords = submissionDatabaseService.countOriginalMetadata( + authenticatedUser, + organism, + groupIdsFilter?.takeIf { it.isNotEmpty() }, + statusesFilter?.takeIf { it.isNotEmpty() }, + ) + headers.add("x-total-records", totalRecords.toString()) + // TODO(https://github.com/loculus-project/loculus/issues/2778) + // There's a possibility that the totalRecords change between the count and the actual query + // this is not too bad, if the client ends up with a few more records than expected + // We just need to make sure the etag used is from before the count + // Alternatively, we could read once to file while counting and then stream the file val streamBody = streamTransactioned(compression) { submissionDatabaseService.streamOriginalMetadata( diff --git a/backend/src/main/kotlin/org/loculus/backend/service/submission/SubmissionDatabaseService.kt b/backend/src/main/kotlin/org/loculus/backend/service/submission/SubmissionDatabaseService.kt index cbbe342bc..7c2041b6d 100644 --- 
a/backend/src/main/kotlin/org/loculus/backend/service/submission/SubmissionDatabaseService.kt +++ b/backend/src/main/kotlin/org/loculus/backend/service/submission/SubmissionDatabaseService.kt @@ -950,13 +950,12 @@ open class SubmissionDatabaseService( ) } - fun streamOriginalMetadata( + private fun originalMetadataFilter( authenticatedUser: AuthenticatedUser, organism: Organism, groupIdsFilter: List<Int>?, statusesFilter: List<Status>?, - fields: List<String>?, - ): Sequence<AccessionVersionOriginalMetadata> { + ): Op<Boolean> { val organismCondition = SequenceEntriesView.organismIs(organism) val groupCondition = getGroupCondition(groupIdsFilter, authenticatedUser) val statusCondition = if (statusesFilter != null) { @@ -966,6 +965,33 @@ open class SubmissionDatabaseService( } val conditions = organismCondition and groupCondition and statusCondition + return conditions + } + + fun countOriginalMetadata( + authenticatedUser: AuthenticatedUser, + organism: Organism, + groupIdsFilter: List<Int>?, + statusesFilter: List<Status>?, + ): Long = SequenceEntriesView + .selectAll() + .where( + originalMetadataFilter( + authenticatedUser, + organism, + groupIdsFilter, + statusesFilter, + ), + ) + .count() + + fun streamOriginalMetadata( + authenticatedUser: AuthenticatedUser, + organism: Organism, + groupIdsFilter: List<Int>?, + statusesFilter: List<Status>?, + fields: List<String>?, + ): Sequence<AccessionVersionOriginalMetadata> { val originalMetadata = SequenceEntriesView.originalDataColumn .extract<Map<String, String>>("metadata") .alias("original_metadata") @@ -976,7 +1002,14 @@ open class SubmissionDatabaseService( SequenceEntriesView.accessionColumn, SequenceEntriesView.versionColumn, ) - .where(conditions) + .where( + originalMetadataFilter( + authenticatedUser, + organism, + groupIdsFilter, + statusesFilter, + ), + ) .fetchSize(streamBatchSize) .asSequence() .map { diff --git a/backend/src/test/kotlin/org/loculus/backend/controller/submission/GetOriginalMetadataEndpointTest.kt b/backend/src/test/kotlin/org/loculus/backend/controller/submission/GetOriginalMetadataEndpointTest.kt index d6b6f4ad5..e1c71e0d9 100644 --- a/backend/src/test/kotlin/org/loculus/backend/controller/submission/GetOriginalMetadataEndpointTest.kt +++ b/backend/src/test/kotlin/org/loculus/backend/controller/submission/GetOriginalMetadataEndpointTest.kt @@ -52,8 +52,10 @@ class GetOriginalMetadataEndpointTest( @Test fun `GIVEN no sequence entries in database THEN returns empty response`() { val response = submissionControllerClient.getOriginalMetadata() - val responseBody = response.expectNdjsonAndGetContent<AccessionVersionOriginalMetadata>() + + response.andExpect(status().isOk) + .andExpect(header().string("x-total-records", `is`("0"))) assertThat(responseBody, `is`(emptyList())) } @@ -63,6 +65,9 @@ val response = submissionControllerClient.getOriginalMetadata() val responseBody = response.expectNdjsonAndGetContent<AccessionVersionOriginalMetadata>() + + response.andExpect(status().isOk) + .andExpect(header().string("x-total-records", `is`(DefaultFiles.NUMBER_OF_SEQUENCES.toString()))) assertThat(responseBody.size, `is`(DefaultFiles.NUMBER_OF_SEQUENCES)) } @@ -150,6 +155,8 @@ groupIdsFilter = listOf(g0), statusesFilter = listOf(Status.APPROVED_FOR_RELEASE), ) + response.andExpect(status().isOk) + .andExpect(header().string("x-total-records", `is`(expectedAccessionVersions.count().toString()))) val responseBody = response.expectNdjsonAndGetContent<AccessionVersionOriginalMetadata>() assertThat(responseBody, hasSize(expected.size)) diff --git a/ingest/scripts/call_loculus.py b/ingest/scripts/call_loculus.py index e4a68d191..694f41a9f 100644 --- a/ingest/scripts/call_loculus.py +++ 
b/ingest/scripts/call_loculus.py @@ -312,20 +312,29 @@ def get_submitted(config: Config): "statusesFilter": [], } - logger.info("Getting previously submitted sequences") + while True: + logger.info("Getting previously submitted sequences") - response = make_request(HTTPMethod.GET, url, config, params=params) + response = make_request(HTTPMethod.GET, url, config, params=params) + expected_record_count = int(response.headers["x-total-records"]) - entries: list[dict[str, Any]] = [] - try: - entries = list(jsonlines.Reader(response.iter_lines()).iter()) - except jsonlines.Error as err: - response_summary = response.text - max_error_length = 100 - if len(response_summary) > max_error_length: - response_summary = response_summary[:50] + "\n[..]\n" + response_summary[-50:] - logger.error(f"Error decoding JSON from /get-original-metadata: {response_summary}") - raise ValueError from err + entries: list[dict[str, Any]] = [] + try: + entries = list(jsonlines.Reader(response.iter_lines()).iter()) + except jsonlines.Error as err: + response_summary = response.text + max_error_length = 100 + if len(response_summary) > max_error_length: + response_summary = response_summary[:50] + "\n[..]\n" + response_summary[-50:] + logger.error(f"Error decoding JSON from /get-original-metadata: {response_summary}") + raise ValueError from err + + if len(entries) == expected_record_count: + logger.info(f"Got {len(entries)} records as expected") + break + logger.error(f"Got incomplete original metadata stream: expected {expected_record_count} " + f"records but got {len(entries)}. Retrying after 60 seconds.") + sleep(60) # Initialize the dictionary to store results submitted_dict: dict[str, dict[str, str | list]] = {} From fe9b8fea2e7d2b0e07d1201195a5b7218fea3bf8 Mon Sep 17 00:00:00 2001 From: Theo Sanderson Date: Mon, 23 Sep 2024 13:51:30 +0100 Subject: [PATCH 06/20] add reloader to ingest and move argocd sync options to right place (#2860) --- kubernetes/loculus/templates/ingest-deployment.yaml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/kubernetes/loculus/templates/ingest-deployment.yaml b/kubernetes/loculus/templates/ingest-deployment.yaml index 8492c8041..ebb2060f4 100644 --- a/kubernetes/loculus/templates/ingest-deployment.yaml +++ b/kubernetes/loculus/templates/ingest-deployment.yaml @@ -7,6 +7,9 @@ apiVersion: apps/v1 kind: Deployment metadata: name: loculus-ingest-deployment-{{ $key }} + annotations: + argocd.argoproj.io/sync-options: Force=true,Replace=true + reloader.stakater.com/auto: "true" spec: replicas: 1 strategy: @@ -20,8 +23,6 @@ spec: labels: app: loculus component: loculus-ingest-deployment-{{ $key }} - annotations: - argocd.argoproj.io/sync-options: Force=true,Replace=true spec: {{- include "possiblePriorityClassName" $ | nindent 6 }} containers: @@ -56,6 +57,9 @@ apiVersion: batch/v1 kind: CronJob metadata: name: loculus-revoke-and-regroup-cronjob-{{ $key }} + annotations: + argocd.argoproj.io/sync-options: Replace=true + reloader.stakater.com/auto: "true" spec: schedule: "0 0 31 2 *" # Never runs without manual trigger startingDeadlineSeconds: 60 concurrencyPolicy: Forbid jobTemplate: @@ -68,9 +72,6 @@ spec: labels: app: loculus component: loculus-ingest-cronjob-{{ $key }} - annotations: - argocd.argoproj.io/sync-options: Replace=true - reloader.stakater.com/auto: "true" spec: restartPolicy: Never containers: From 1ebcdb05d93e2b6a2bb4c32bf04e7b089f86d986 Mon Sep 17 00:00:00 2001 From: Theo Sanderson Date: Mon, 23 Sep 2024 14:39:13 +0100 Subject: [PATCH 07/20] feat(deployment): ingest version checking (#2859) --- 
.../loculus/templates/ingest-config.yaml | 2 ++ .../loculus/templates/ingest-deployment.yaml | 22 ++++++++++++++++++- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/kubernetes/loculus/templates/ingest-config.yaml b/kubernetes/loculus/templates/ingest-config.yaml index 53f21cb85..85786a24c 100644 --- a/kubernetes/loculus/templates/ingest-config.yaml +++ b/kubernetes/loculus/templates/ingest-config.yaml @@ -1,3 +1,4 @@ +{{- $dockerTag := include "loculus.dockerTag" .Values }} {{- $testconfig := .Values.testconfig | default false }} {{- $backendHost := .Values.environment | eq "server" | ternary (printf "https://backend%s%s" .Values.subdomainSeparator $.Values.host) ($testconfig | ternary "http://localhost:8079" "http://loculus-backend-service:8079") }} {{- $keycloakHost := .Values.environment | eq "server" | ternary (printf "https://authentication%s%s" $.Values.subdomainSeparator $.Values.host) ($testconfig | ternary "http://localhost:8083" "http://loculus-keycloak-service:8083") }} @@ -12,6 +13,7 @@ metadata: data: config.yaml: | {{- $values.ingest.configFile | toYaml | nindent 4 }} + verify_loculus_version_is: {{$dockerTag}} organism: {{ $key }} backend_url: {{ $backendHost }} keycloak_token_url: {{ $keycloakHost -}}/realms/loculus/protocol/openid-connect/token diff --git a/kubernetes/loculus/templates/ingest-deployment.yaml b/kubernetes/loculus/templates/ingest-deployment.yaml index ebb2060f4..47d4d41d8 100644 --- a/kubernetes/loculus/templates/ingest-deployment.yaml +++ b/kubernetes/loculus/templates/ingest-deployment.yaml @@ -25,6 +25,26 @@ spec: component: loculus-ingest-deployment-{{ $key }} spec: {{- include "possiblePriorityClassName" $ | nindent 6 }} + initContainers: + - name: version-check + image: busybox + {{- include "loculus.resources" (list "ingest-init" $.Values) | nindent 10 }} + command: ['sh', '-c', ' + CONFIG_VERSION=$(grep "verify_loculus_version_is:" /package/config/config.yaml | sed "s/verify_loculus_version_is: //;"); + DOCKER_TAG="{{ $dockerTag }}"; + echo "Config version: $CONFIG_VERSION"; + echo "Docker tag: $DOCKER_TAG"; + if [ "$CONFIG_VERSION" != "$DOCKER_TAG" ]; then + echo "Version mismatch: ConfigMap version $CONFIG_VERSION does not match docker tag $DOCKER_TAG"; + exit 1; + else + echo "Version match confirmed"; + fi + '] + volumeMounts: + - name: loculus-ingest-config-volume-{{ $key }} + mountPath: /package/config/config.yaml + subPath: config.yaml containers: - name: ingest-{{ $key }} image: {{ $value.ingest.image}}:{{ $dockerTag }} @@ -110,4 +130,4 @@ spec: {{- end }} {{- end }} {{- end }} -{{- end }} \ No newline at end of file +{{- end }} From 20bf7613fd994a7d2dff5425e6f8fcfe0bf58e72 Mon Sep 17 00:00:00 2001 From: "Anna (Anya) Parker" <50943381+anna-parker@users.noreply.github.com> Date: Mon, 23 Sep 2024 17:18:24 +0200 Subject: [PATCH 08/20] fix(deployment): suspend loculus-revoke-and-regroup-cronjob (#2862) --- kubernetes/loculus/templates/ingest-deployment.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/kubernetes/loculus/templates/ingest-deployment.yaml b/kubernetes/loculus/templates/ingest-deployment.yaml index 47d4d41d8..a76882ba0 100644 --- a/kubernetes/loculus/templates/ingest-deployment.yaml +++ b/kubernetes/loculus/templates/ingest-deployment.yaml @@ -82,6 +82,7 @@ metadata: reloader.stakater.com/auto: "true" spec: schedule: "0 0 31 2 *" # Never runs without manual trigger + suspend: true startingDeadlineSeconds: 60 concurrencyPolicy: Forbid jobTemplate: From 5188f4b8c764713b0e6daf36f18064e390dcf908 Mon Sep 17 00:00:00 
2001 From: Theo Sanderson Date: Mon, 23 Sep 2024 16:57:34 +0100 Subject: [PATCH 09/20] Remove reloader from CronJob (#2863) --- kubernetes/loculus/templates/ingest-deployment.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/kubernetes/loculus/templates/ingest-deployment.yaml b/kubernetes/loculus/templates/ingest-deployment.yaml index a76882ba0..2545563d3 100644 --- a/kubernetes/loculus/templates/ingest-deployment.yaml +++ b/kubernetes/loculus/templates/ingest-deployment.yaml @@ -79,7 +79,6 @@ metadata: name: loculus-revoke-and-regroup-cronjob-{{ $key }} annotations: argocd.argoproj.io/sync-options: Replace=true - reloader.stakater.com/auto: "true" spec: schedule: "0 0 31 2 *" # Never runs without manual trigger suspend: true From 43fd0d84b647cce1cd7d5e49544f78507c76366d Mon Sep 17 00:00:00 2001 From: Theo Sanderson Date: Mon, 23 Sep 2024 17:08:44 +0100 Subject: [PATCH 10/20] blank preview (#2861) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index d1a354e8d..78e1eeb89 100644 --- a/README.md +++ b/README.md @@ -60,6 +60,7 @@ While the documentation is still a work in progress, a look at the [`.github/wor ## Authorization + ### User management We use keycloak for authorization. The keycloak instance is deployed in the `loculus` namespace and exposed to the outside either under `localhost:8083` or `authentication-[your-argo-cd-path]`. The keycloak instance is configured with a realm called `loculus` and a client called `backend-client`. The realm is configured to use the exposed url of keycloak as a [frontend url](https://www.keycloak.org/server/hostname). From c61ded5ad6327a8591dcc1f3c9d195e146802335 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 21:44:49 +0200 Subject: [PATCH 11/20] chore(deps): bump the minorandpatch group in /docs with 4 updates (#2864) Bumps the minorandpatch group in /docs with 4 updates: [@astrojs/starlight](https://github.com/withastro/starlight/tree/HEAD/packages/starlight), [@astrojs/tailwind](https://github.com/withastro/astro/tree/HEAD/packages/integrations/tailwind), [astro](https://github.com/withastro/astro/tree/HEAD/packages/astro) and [tailwindcss](https://github.com/tailwindlabs/tailwindcss). 
Updates `@astrojs/starlight` from 0.27.1 to 0.28.2 - [Release notes](https://github.com/withastro/starlight/releases) - [Changelog](https://github.com/withastro/starlight/blob/main/packages/starlight/CHANGELOG.md) - [Commits](https://github.com/withastro/starlight/commits/@astrojs/starlight@0.28.2/packages/starlight) Updates `@astrojs/tailwind` from 5.1.0 to 5.1.1 - [Release notes](https://github.com/withastro/astro/releases) - [Changelog](https://github.com/withastro/astro/blob/main/packages/integrations/tailwind/CHANGELOG.md) - [Commits](https://github.com/withastro/astro/commits/@astrojs/tailwind@5.1.1/packages/integrations/tailwind) Updates `astro` from 4.15.6 to 4.15.9 - [Release notes](https://github.com/withastro/astro/releases) - [Changelog](https://github.com/withastro/astro/blob/main/packages/astro/CHANGELOG.md) - [Commits](https://github.com/withastro/astro/commits/astro@4.15.9/packages/astro) Updates `tailwindcss` from 3.4.11 to 3.4.13 - [Release notes](https://github.com/tailwindlabs/tailwindcss/releases) - [Changelog](https://github.com/tailwindlabs/tailwindcss/blob/v3.4.13/CHANGELOG.md) - [Commits](https://github.com/tailwindlabs/tailwindcss/compare/v3.4.11...v3.4.13) --- updated-dependencies: - dependency-name: "@astrojs/starlight" dependency-type: direct:production update-type: version-update:semver-minor dependency-group: minorandpatch - dependency-name: "@astrojs/tailwind" dependency-type: direct:production update-type: version-update:semver-patch dependency-group: minorandpatch - dependency-name: astro dependency-type: direct:production update-type: version-update:semver-patch dependency-group: minorandpatch - dependency-name: tailwindcss dependency-type: direct:production update-type: version-update:semver-patch dependency-group: minorandpatch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/package-lock.json | 158 +++++++++++++++++++++++++++-------------- docs/package.json | 8 +-- 2 files changed, 107 insertions(+), 59 deletions(-) diff --git a/docs/package-lock.json b/docs/package-lock.json index 0c0590c7e..5e6983746 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -9,15 +9,15 @@ "version": "0.0.1", "dependencies": { "@astrojs/check": "^0.9.3", - "@astrojs/starlight": "^0.27.1", + "@astrojs/starlight": "^0.28.2", "@astrojs/starlight-tailwind": "^2.0.3", - "@astrojs/tailwind": "^5.1.0", - "astro": "^4.15.6", + "@astrojs/tailwind": "^5.1.1", + "astro": "^4.15.9", "prettier": "^3.3.3", "prettier-plugin-astro": "^0.14.1", "prettier-plugin-tailwindcss": "^0.6.6", "sharp": "^0.33.5", - "tailwindcss": "^3.4.11", + "tailwindcss": "^3.4.13", "typescript": "^5.6.2" } }, @@ -189,9 +189,9 @@ } }, "node_modules/@astrojs/starlight": { - "version": "0.27.1", - "resolved": "https://registry.npmjs.org/@astrojs/starlight/-/starlight-0.27.1.tgz", - "integrity": "sha512-L2hEgN/Tk7tfBDeaqUOgOpey5NcUL78FuQa06iNxyZ6RjyYyuXSniOoFxZYIo5PpY9O1dLdK22PkZyCDpO729g==", + "version": "0.28.2", + "resolved": "https://registry.npmjs.org/@astrojs/starlight/-/starlight-0.28.2.tgz", + "integrity": "sha512-Q1/Ujl2EzWX71qwqdt/0KP3wOyX6Rvyzcep/zD3hRCtw/Vi2TReh4Q2wLwz7mnbuYU9H7YvBKYknbkmjC+K/0w==", "dependencies": { "@astrojs/mdx": "^3.1.3", "@astrojs/sitemap": "^3.1.6", @@ -204,6 +204,7 @@ "hast-util-select": "^6.0.2", "hast-util-to-string": "^3.0.0", "hastscript": "^9.0.0", + "i18next": "^23.11.5", "mdast-util-directive": "^3.0.0", "mdast-util-to-markdown": "^2.1.0", "mdast-util-to-string": "^4.0.0", @@ -216,7 +217,7 @@ "vfile": "^6.0.2" }, "peerDependencies": { - "astro": "^4.8.6" + "astro": "^4.14.0" } }, "node_modules/@astrojs/starlight-tailwind": { @@ -247,17 +248,16 @@ } }, "node_modules/@astrojs/tailwind": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@astrojs/tailwind/-/tailwind-5.1.0.tgz", - "integrity": "sha512-BJoCDKuWhU9FT2qYg+fr6Nfb3qP4ShtyjXGHKA/4mHN94z7BGcmauQK23iy+YH5qWvTnhqkd6mQPQ1yTZTe9Ig==", - "license": "MIT", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@astrojs/tailwind/-/tailwind-5.1.1.tgz", + "integrity": "sha512-LwurA10uIKcGRxQP2R81RvAnBT0WPKzBntXZBF4hrAefDgM5Uumn0nsGr6tdIjSARgYz4X+Cq/Vh78t3bql3yw==", "dependencies": { - "autoprefixer": "^10.4.15", - "postcss": "^8.4.28", + "autoprefixer": "^10.4.20", + "postcss": "^8.4.45", "postcss-load-config": "^4.0.2" }, "peerDependencies": { - "astro": "^3.0.0 || ^4.0.0", + "astro": "^3.0.0 || ^4.0.0 || ^5.0.0-beta.0", "tailwindcss": "^3.0.24" } }, @@ -536,6 +536,17 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/runtime": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.6.tgz", + "integrity": "sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==", + "dependencies": { + "regenerator-runtime": "^0.14.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@babel/template": { "version": "7.25.0", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.0.tgz", @@ -1538,9 +1549,9 @@ } }, "node_modules/@oslojs/encoding": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/@oslojs/encoding/-/encoding-0.4.1.tgz", - "integrity": "sha512-hkjo6MuIK/kQR5CrGNdAPZhS01ZCXuWDRJ187zh6qqF2+yMHZpD9fAYpX8q2bOO6Ryhl3XpCT6kUX76N8hhm4Q==" + "version": 
"1.0.0", + "resolved": "https://registry.npmjs.org/@oslojs/encoding/-/encoding-1.0.0.tgz", + "integrity": "sha512-dyIB0SdZgMm5BhGwdSp8rMxEFIopLKxDG1vxIBaiogyom6ZqH2aXPb6DEC2WzOOWKdPSq1cxdNeRx2wAn1Z+ZQ==" }, "node_modules/@pagefind/darwin-arm64": { "version": "1.1.1", @@ -2253,9 +2264,9 @@ } }, "node_modules/astro": { - "version": "4.15.6", - "resolved": "https://registry.npmjs.org/astro/-/astro-4.15.6.tgz", - "integrity": "sha512-SWcUNwm8CiVRaIbh4w5byh62BNihpsovlCd4ElvC7cL/53D24HcI7AaGFsPrromCamQklwQmIan/QS7x/3lLuQ==", + "version": "4.15.9", + "resolved": "https://registry.npmjs.org/astro/-/astro-4.15.9.tgz", + "integrity": "sha512-51oXq9qrZ5OPWYmEXt1kGrvWmVeWsx28SgBTzi2XW6iwcnW/wC5ONm6ol6qBGSCF93tQvZplXvuzpaw1injECA==", "dependencies": { "@astrojs/compiler": "^2.10.3", "@astrojs/internal-helpers": "0.4.1", @@ -2264,7 +2275,7 @@ "@babel/core": "^7.25.2", "@babel/plugin-transform-react-jsx": "^7.25.2", "@babel/types": "^7.25.6", - "@oslojs/encoding": "^0.4.1", + "@oslojs/encoding": "^1.0.0", "@rollup/pluginutils": "^5.1.0", "@types/babel__core": "^7.20.5", "@types/cookie": "^0.6.0", @@ -2303,7 +2314,6 @@ "ora": "^8.1.0", "p-limit": "^6.1.0", "p-queue": "^8.0.1", - "path-to-regexp": "6.2.2", "preferred-pm": "^4.0.0", "prompts": "^2.4.2", "rehype": "^13.0.1", @@ -3897,6 +3907,28 @@ "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==" }, + "node_modules/i18next": { + "version": "23.15.1", + "resolved": "https://registry.npmjs.org/i18next/-/i18next-23.15.1.tgz", + "integrity": "sha512-wB4abZ3uK7EWodYisHl/asf8UYEhrI/vj/8aoSsrj/ZDxj4/UXPOa1KvFt1Fq5hkUHquNqwFlDprmjZ8iySgYA==", + "funding": [ + { + "type": "individual", + "url": "https://locize.com" + }, + { + "type": "individual", + "url": "https://locize.com/i18next.html" + }, + { + "type": "individual", + "url": "https://www.i18next.com/how-to/faq#i18next-is-awesome.-how-can-i-support-the-project" + } + ], + "dependencies": { + "@babel/runtime": "^7.23.2" + } + }, "node_modules/import-meta-resolve": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/import-meta-resolve/-/import-meta-resolve-4.1.0.tgz", @@ -5808,11 +5840,6 @@ "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", "license": "ISC" }, - "node_modules/path-to-regexp": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.2.tgz", - "integrity": "sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw==" - }, "node_modules/periscopic": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/periscopic/-/periscopic-3.1.0.tgz", @@ -6212,6 +6239,11 @@ "node": ">=8.10.0" } }, + "node_modules/regenerator-runtime": { + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", + "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" + }, "node_modules/regex": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/regex/-/regex-4.3.2.tgz", @@ -6988,9 +7020,9 @@ } }, "node_modules/tailwindcss": { - "version": "3.4.11", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.11.tgz", - "integrity": "sha512-qhEuBcLemjSJk5ajccN9xJFtM/h0AVCPaA6C92jNP+M2J8kX+eMJHI7R2HFKUvvAsMpcfLILMCFYSeDwpMmlUg==", + "version": "3.4.13", + "resolved": 
"https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.13.tgz", + "integrity": "sha512-KqjHOJKogOUt5Bs752ykCeiwvi0fKVkr5oqsFNt/8px/tA8scFPIlkygsf6jXrfCqGHz7VflA6+yytWuM+XhFw==", "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", @@ -8254,9 +8286,9 @@ } }, "@astrojs/starlight": { - "version": "0.27.1", - "resolved": "https://registry.npmjs.org/@astrojs/starlight/-/starlight-0.27.1.tgz", - "integrity": "sha512-L2hEgN/Tk7tfBDeaqUOgOpey5NcUL78FuQa06iNxyZ6RjyYyuXSniOoFxZYIo5PpY9O1dLdK22PkZyCDpO729g==", + "version": "0.28.2", + "resolved": "https://registry.npmjs.org/@astrojs/starlight/-/starlight-0.28.2.tgz", + "integrity": "sha512-Q1/Ujl2EzWX71qwqdt/0KP3wOyX6Rvyzcep/zD3hRCtw/Vi2TReh4Q2wLwz7mnbuYU9H7YvBKYknbkmjC+K/0w==", "requires": { "@astrojs/mdx": "^3.1.3", "@astrojs/sitemap": "^3.1.6", @@ -8269,6 +8301,7 @@ "hast-util-select": "^6.0.2", "hast-util-to-string": "^3.0.0", "hastscript": "^9.0.0", + "i18next": "^23.11.5", "mdast-util-directive": "^3.0.0", "mdast-util-to-markdown": "^2.1.0", "mdast-util-to-string": "^4.0.0", @@ -8302,12 +8335,12 @@ "requires": {} }, "@astrojs/tailwind": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@astrojs/tailwind/-/tailwind-5.1.0.tgz", - "integrity": "sha512-BJoCDKuWhU9FT2qYg+fr6Nfb3qP4ShtyjXGHKA/4mHN94z7BGcmauQK23iy+YH5qWvTnhqkd6mQPQ1yTZTe9Ig==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@astrojs/tailwind/-/tailwind-5.1.1.tgz", + "integrity": "sha512-LwurA10uIKcGRxQP2R81RvAnBT0WPKzBntXZBF4hrAefDgM5Uumn0nsGr6tdIjSARgYz4X+Cq/Vh78t3bql3yw==", "requires": { - "autoprefixer": "^10.4.15", - "postcss": "^8.4.28", + "autoprefixer": "^10.4.20", + "postcss": "^8.4.45", "postcss-load-config": "^4.0.2" } }, @@ -8511,6 +8544,14 @@ "@babel/types": "^7.25.2" } }, + "@babel/runtime": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.6.tgz", + "integrity": "sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==", + "requires": { + "regenerator-runtime": "^0.14.0" + } + }, "@babel/template": { "version": "7.25.0", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.0.tgz", @@ -9059,9 +9100,9 @@ } }, "@oslojs/encoding": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/@oslojs/encoding/-/encoding-0.4.1.tgz", - "integrity": "sha512-hkjo6MuIK/kQR5CrGNdAPZhS01ZCXuWDRJ187zh6qqF2+yMHZpD9fAYpX8q2bOO6Ryhl3XpCT6kUX76N8hhm4Q==" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@oslojs/encoding/-/encoding-1.0.0.tgz", + "integrity": "sha512-dyIB0SdZgMm5BhGwdSp8rMxEFIopLKxDG1vxIBaiogyom6ZqH2aXPb6DEC2WzOOWKdPSq1cxdNeRx2wAn1Z+ZQ==" }, "@pagefind/darwin-arm64": { "version": "1.1.1", @@ -9595,9 +9636,9 @@ "integrity": "sha512-ISvCdHdlTDlH5IpxQJIex7BWBywFWgjJSVdwst+/iQCoEYnyOaQ95+X1JGshuBjGp6nxKUy1jMgE3zPqN7fQdg==" }, "astro": { - "version": "4.15.6", - "resolved": "https://registry.npmjs.org/astro/-/astro-4.15.6.tgz", - "integrity": "sha512-SWcUNwm8CiVRaIbh4w5byh62BNihpsovlCd4ElvC7cL/53D24HcI7AaGFsPrromCamQklwQmIan/QS7x/3lLuQ==", + "version": "4.15.9", + "resolved": "https://registry.npmjs.org/astro/-/astro-4.15.9.tgz", + "integrity": "sha512-51oXq9qrZ5OPWYmEXt1kGrvWmVeWsx28SgBTzi2XW6iwcnW/wC5ONm6ol6qBGSCF93tQvZplXvuzpaw1injECA==", "requires": { "@astrojs/compiler": "^2.10.3", "@astrojs/internal-helpers": "0.4.1", @@ -9606,7 +9647,7 @@ "@babel/core": "^7.25.2", "@babel/plugin-transform-react-jsx": "^7.25.2", "@babel/types": "^7.25.6", - "@oslojs/encoding": "^0.4.1", + "@oslojs/encoding": "^1.0.0", 
"@rollup/pluginutils": "^5.1.0", "@types/babel__core": "^7.20.5", "@types/cookie": "^0.6.0", @@ -9645,7 +9686,6 @@ "ora": "^8.1.0", "p-limit": "^6.1.0", "p-queue": "^8.0.1", - "path-to-regexp": "6.2.2", "preferred-pm": "^4.0.0", "prompts": "^2.4.2", "rehype": "^13.0.1", @@ -10749,6 +10789,14 @@ "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==" }, + "i18next": { + "version": "23.15.1", + "resolved": "https://registry.npmjs.org/i18next/-/i18next-23.15.1.tgz", + "integrity": "sha512-wB4abZ3uK7EWodYisHl/asf8UYEhrI/vj/8aoSsrj/ZDxj4/UXPOa1KvFt1Fq5hkUHquNqwFlDprmjZ8iySgYA==", + "requires": { + "@babel/runtime": "^7.23.2" + } + }, "import-meta-resolve": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/import-meta-resolve/-/import-meta-resolve-4.1.0.tgz", @@ -11996,11 +12044,6 @@ } } }, - "path-to-regexp": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.2.tgz", - "integrity": "sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw==" - }, "periscopic": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/periscopic/-/periscopic-3.1.0.tgz", @@ -12183,6 +12226,11 @@ "picomatch": "^2.2.1" } }, + "regenerator-runtime": { + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", + "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" + }, "regex": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/regex/-/regex-4.3.2.tgz", @@ -12733,9 +12781,9 @@ "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" }, "tailwindcss": { - "version": "3.4.11", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.11.tgz", - "integrity": "sha512-qhEuBcLemjSJk5ajccN9xJFtM/h0AVCPaA6C92jNP+M2J8kX+eMJHI7R2HFKUvvAsMpcfLILMCFYSeDwpMmlUg==", + "version": "3.4.13", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.13.tgz", + "integrity": "sha512-KqjHOJKogOUt5Bs752ykCeiwvi0fKVkr5oqsFNt/8px/tA8scFPIlkygsf6jXrfCqGHz7VflA6+yytWuM+XhFw==", "requires": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", diff --git a/docs/package.json b/docs/package.json index a983fc262..7720f4c59 100644 --- a/docs/package.json +++ b/docs/package.json @@ -12,15 +12,15 @@ }, "dependencies": { "@astrojs/check": "^0.9.3", - "@astrojs/starlight": "^0.27.1", + "@astrojs/starlight": "^0.28.2", "@astrojs/starlight-tailwind": "^2.0.3", - "@astrojs/tailwind": "^5.1.0", - "astro": "^4.15.6", + "@astrojs/tailwind": "^5.1.1", + "astro": "^4.15.9", "prettier": "^3.3.3", "prettier-plugin-astro": "^0.14.1", "prettier-plugin-tailwindcss": "^0.6.6", "sharp": "^0.33.5", - "tailwindcss": "^3.4.11", + "tailwindcss": "^3.4.13", "typescript": "^5.6.2" } } From e20e0847eb16e2d6471e5d497f02ddbcdde9a504 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 09:05:35 +0200 Subject: [PATCH 12/20] chore(deps): bump rollup in /docs in the npm_and_yarn group (#2871) Bumps the npm_and_yarn group in /docs with 1 update: [rollup](https://github.com/rollup/rollup). 
Updates `rollup` from 4.21.2 to 4.22.4 - [Release notes](https://github.com/rollup/rollup/releases) - [Changelog](https://github.com/rollup/rollup/blob/master/CHANGELOG.md) - [Commits](https://github.com/rollup/rollup/compare/v4.21.2...v4.22.4) --- updated-dependencies: - dependency-name: rollup dependency-type: indirect dependency-group: npm_and_yarn ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/package-lock.json | 270 ++++++++++++++++++++--------------------- 1 file changed, 135 insertions(+), 135 deletions(-) diff --git a/docs/package-lock.json b/docs/package-lock.json index 5e6983746..00c1d1618 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -1655,9 +1655,9 @@ "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==" }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.21.2.tgz", - "integrity": "sha512-fSuPrt0ZO8uXeS+xP3b+yYTCBUd05MoSp2N/MFOgjhhUhMmchXlpTQrTpI8T+YAwAQuK7MafsCOxW7VrPMrJcg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.22.4.tgz", + "integrity": "sha512-Fxamp4aEZnfPOcGA8KSNEohV8hX7zVHOemC8jVBoBUHu5zpJK/Eu3uJwt6BMgy9fkvzxDaurgj96F/NiLukF2w==", "cpu": [ "arm" ], @@ -1667,9 +1667,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.21.2.tgz", - "integrity": "sha512-xGU5ZQmPlsjQS6tzTTGwMsnKUtu0WVbl0hYpTPauvbRAnmIvpInhJtgjj3mcuJpEiuUw4v1s4BimkdfDWlh7gA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.22.4.tgz", + "integrity": "sha512-VXoK5UMrgECLYaMuGuVTOx5kcuap1Jm8g/M83RnCHBKOqvPPmROFJGQaZhGccnsFtfXQ3XYa4/jMCJvZnbJBdA==", "cpu": [ "arm64" ], @@ -1679,9 +1679,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.21.2.tgz", - "integrity": "sha512-99AhQ3/ZMxU7jw34Sq8brzXqWH/bMnf7ZVhvLk9QU2cOepbQSVTns6qoErJmSiAvU3InRqC2RRZ5ovh1KN0d0Q==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.22.4.tgz", + "integrity": "sha512-xMM9ORBqu81jyMKCDP+SZDhnX2QEVQzTcC6G18KlTQEzWK8r/oNZtKuZaCcHhnsa6fEeOBionoyl5JsAbE/36Q==", "cpu": [ "arm64" ], @@ -1691,9 +1691,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.21.2.tgz", - "integrity": "sha512-ZbRaUvw2iN/y37x6dY50D8m2BnDbBjlnMPotDi/qITMJ4sIxNY33HArjikDyakhSv0+ybdUxhWxE6kTI4oX26w==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.22.4.tgz", + "integrity": "sha512-aJJyYKQwbHuhTUrjWjxEvGnNNBCnmpHDvrb8JFDbeSH3m2XdHcxDd3jthAzvmoI8w/kSjd2y0udT+4okADsZIw==", "cpu": [ "x64" ], @@ -1703,9 +1703,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.21.2.tgz", - "integrity": "sha512-ztRJJMiE8nnU1YFcdbd9BcH6bGWG1z+jP+IPW2oDUAPxPjo9dverIOyXz76m6IPA6udEL12reYeLojzW2cYL7w==", + "version": "4.22.4", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.22.4.tgz", + "integrity": "sha512-j63YtCIRAzbO+gC2L9dWXRh5BFetsv0j0va0Wi9epXDgU/XUi5dJKo4USTttVyK7fGw2nPWK0PbAvyliz50SCQ==", "cpu": [ "arm" ], @@ -1715,9 +1715,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.21.2.tgz", - "integrity": "sha512-flOcGHDZajGKYpLV0JNc0VFH361M7rnV1ee+NTeC/BQQ1/0pllYcFmxpagltANYt8FYf9+kL6RSk80Ziwyhr7w==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.22.4.tgz", + "integrity": "sha512-dJnWUgwWBX1YBRsuKKMOlXCzh2Wu1mlHzv20TpqEsfdZLb3WoJW2kIEsGwLkroYf24IrPAvOT/ZQ2OYMV6vlrg==", "cpu": [ "arm" ], @@ -1727,9 +1727,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.21.2.tgz", - "integrity": "sha512-69CF19Kp3TdMopyteO/LJbWufOzqqXzkrv4L2sP8kfMaAQ6iwky7NoXTp7bD6/irKgknDKM0P9E/1l5XxVQAhw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.22.4.tgz", + "integrity": "sha512-AdPRoNi3NKVLolCN/Sp4F4N1d98c4SBnHMKoLuiG6RXgoZ4sllseuGioszumnPGmPM2O7qaAX/IJdeDU8f26Aw==", "cpu": [ "arm64" ], @@ -1739,9 +1739,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.21.2.tgz", - "integrity": "sha512-48pD/fJkTiHAZTnZwR0VzHrao70/4MlzJrq0ZsILjLW/Ab/1XlVUStYyGt7tdyIiVSlGZbnliqmult/QGA2O2w==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.22.4.tgz", + "integrity": "sha512-Gl0AxBtDg8uoAn5CCqQDMqAx22Wx22pjDOjBdmG0VIWX3qUBHzYmOKh8KXHL4UpogfJ14G4wk16EQogF+v8hmA==", "cpu": [ "arm64" ], @@ -1751,9 +1751,9 @@ ] }, "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.21.2.tgz", - "integrity": "sha512-cZdyuInj0ofc7mAQpKcPR2a2iu4YM4FQfuUzCVA2u4HI95lCwzjoPtdWjdpDKyHxI0UO82bLDoOaLfpZ/wviyQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.22.4.tgz", + "integrity": "sha512-3aVCK9xfWW1oGQpTsYJJPF6bfpWfhbRnhdlyhak2ZiyFLDaayz0EP5j9V1RVLAAxlmWKTDfS9wyRyY3hvhPoOg==", "cpu": [ "ppc64" ], @@ -1763,9 +1763,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.21.2.tgz", - "integrity": "sha512-RL56JMT6NwQ0lXIQmMIWr1SW28z4E4pOhRRNqwWZeXpRlykRIlEpSWdsgNWJbYBEWD84eocjSGDu/XxbYeCmwg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.22.4.tgz", + "integrity": "sha512-ePYIir6VYnhgv2C5Xe9u+ico4t8sZWXschR6fMgoPUK31yQu7hTEJb7bCqivHECwIClJfKgE7zYsh1qTP3WHUA==", "cpu": [ "riscv64" ], @@ -1775,9 +1775,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.21.2.tgz", - "integrity": 
"sha512-PMxkrWS9z38bCr3rWvDFVGD6sFeZJw4iQlhrup7ReGmfn7Oukrr/zweLhYX6v2/8J6Cep9IEA/SmjXjCmSbrMQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.22.4.tgz", + "integrity": "sha512-GqFJ9wLlbB9daxhVlrTe61vJtEY99/xB3C8e4ULVsVfflcpmR6c8UZXjtkMA6FhNONhj2eA5Tk9uAVw5orEs4Q==", "cpu": [ "s390x" ], @@ -1787,9 +1787,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.21.2.tgz", - "integrity": "sha512-B90tYAUoLhU22olrafY3JQCFLnT3NglazdwkHyxNDYF/zAxJt5fJUB/yBoWFoIQ7SQj+KLe3iL4BhOMa9fzgpw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.22.4.tgz", + "integrity": "sha512-87v0ol2sH9GE3cLQLNEy0K/R0pz1nvg76o8M5nhMR0+Q+BBGLnb35P0fVz4CQxHYXaAOhE8HhlkaZfsdUOlHwg==", "cpu": [ "x64" ], @@ -1799,9 +1799,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.21.2.tgz", - "integrity": "sha512-7twFizNXudESmC9oneLGIUmoHiiLppz/Xs5uJQ4ShvE6234K0VB1/aJYU3f/4g7PhssLGKBVCC37uRkkOi8wjg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.22.4.tgz", + "integrity": "sha512-UV6FZMUgePDZrFjrNGIWzDo/vABebuXBhJEqrHxrGiU6HikPy0Z3LfdtciIttEUQfuDdCn8fqh7wiFJjCNwO+g==", "cpu": [ "x64" ], @@ -1811,9 +1811,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.21.2.tgz", - "integrity": "sha512-9rRero0E7qTeYf6+rFh3AErTNU1VCQg2mn7CQcI44vNUWM9Ze7MSRS/9RFuSsox+vstRt97+x3sOhEey024FRQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.22.4.tgz", + "integrity": "sha512-BjI+NVVEGAXjGWYHz/vv0pBqfGoUH0IGZ0cICTn7kB9PyjrATSkX+8WkguNjWoj2qSr1im/+tTGRaY+4/PdcQw==", "cpu": [ "arm64" ], @@ -1823,9 +1823,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.21.2.tgz", - "integrity": "sha512-5rA4vjlqgrpbFVVHX3qkrCo/fZTj1q0Xxpg+Z7yIo3J2AilW7t2+n6Q8Jrx+4MrYpAnjttTYF8rr7bP46BPzRw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.22.4.tgz", + "integrity": "sha512-SiWG/1TuUdPvYmzmYnmd3IEifzR61Tragkbx9D3+R8mzQqDBz8v+BvZNDlkiTtI9T15KYZhP0ehn3Dld4n9J5g==", "cpu": [ "ia32" ], @@ -1835,9 +1835,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.21.2.tgz", - "integrity": "sha512-6UUxd0+SKomjdzuAcp+HAmxw1FlGBnl1v2yEPSabtx4lBfdXHDVsW7+lQkgz9cNFJGY3AWR7+V8P5BqkD9L9nA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.22.4.tgz", + "integrity": "sha512-j8pPKp53/lq9lMXN57S8cFz0MynJk8OWNuUnXct/9KCpKU7DgU3bYMJhwWmcqC0UU29p8Lr0/7KEVcaM6bf47Q==", "cpu": [ "x64" ], @@ -6573,9 +6573,9 @@ } }, "node_modules/rollup": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.21.2.tgz", - "integrity": 
"sha512-e3TapAgYf9xjdLvKQCkQTnbTKd4a6jwlpQSJJFokHGaX2IVjoEqkIIhiQfqsi0cdwlOD+tQGuOd5AJkc5RngBw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.22.4.tgz", + "integrity": "sha512-vD8HJ5raRcWOyymsR6Z3o6+RzfEPCnVLMFJ6vRslO1jt4LO6dUo5Qnpg7y4RkZFM2DMe3WUirkI5c16onjrc6A==", "dependencies": { "@types/estree": "1.0.5" }, @@ -6587,22 +6587,22 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.21.2", - "@rollup/rollup-android-arm64": "4.21.2", - "@rollup/rollup-darwin-arm64": "4.21.2", - "@rollup/rollup-darwin-x64": "4.21.2", - "@rollup/rollup-linux-arm-gnueabihf": "4.21.2", - "@rollup/rollup-linux-arm-musleabihf": "4.21.2", - "@rollup/rollup-linux-arm64-gnu": "4.21.2", - "@rollup/rollup-linux-arm64-musl": "4.21.2", - "@rollup/rollup-linux-powerpc64le-gnu": "4.21.2", - "@rollup/rollup-linux-riscv64-gnu": "4.21.2", - "@rollup/rollup-linux-s390x-gnu": "4.21.2", - "@rollup/rollup-linux-x64-gnu": "4.21.2", - "@rollup/rollup-linux-x64-musl": "4.21.2", - "@rollup/rollup-win32-arm64-msvc": "4.21.2", - "@rollup/rollup-win32-ia32-msvc": "4.21.2", - "@rollup/rollup-win32-x64-msvc": "4.21.2", + "@rollup/rollup-android-arm-eabi": "4.22.4", + "@rollup/rollup-android-arm64": "4.22.4", + "@rollup/rollup-darwin-arm64": "4.22.4", + "@rollup/rollup-darwin-x64": "4.22.4", + "@rollup/rollup-linux-arm-gnueabihf": "4.22.4", + "@rollup/rollup-linux-arm-musleabihf": "4.22.4", + "@rollup/rollup-linux-arm64-gnu": "4.22.4", + "@rollup/rollup-linux-arm64-musl": "4.22.4", + "@rollup/rollup-linux-powerpc64le-gnu": "4.22.4", + "@rollup/rollup-linux-riscv64-gnu": "4.22.4", + "@rollup/rollup-linux-s390x-gnu": "4.22.4", + "@rollup/rollup-linux-x64-gnu": "4.22.4", + "@rollup/rollup-linux-x64-musl": "4.22.4", + "@rollup/rollup-win32-arm64-msvc": "4.22.4", + "@rollup/rollup-win32-ia32-msvc": "4.22.4", + "@rollup/rollup-win32-x64-msvc": "4.22.4", "fsevents": "~2.3.2" } }, @@ -9163,99 +9163,99 @@ } }, "@rollup/rollup-android-arm-eabi": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.21.2.tgz", - "integrity": "sha512-fSuPrt0ZO8uXeS+xP3b+yYTCBUd05MoSp2N/MFOgjhhUhMmchXlpTQrTpI8T+YAwAQuK7MafsCOxW7VrPMrJcg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.22.4.tgz", + "integrity": "sha512-Fxamp4aEZnfPOcGA8KSNEohV8hX7zVHOemC8jVBoBUHu5zpJK/Eu3uJwt6BMgy9fkvzxDaurgj96F/NiLukF2w==", "optional": true }, "@rollup/rollup-android-arm64": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.21.2.tgz", - "integrity": "sha512-xGU5ZQmPlsjQS6tzTTGwMsnKUtu0WVbl0hYpTPauvbRAnmIvpInhJtgjj3mcuJpEiuUw4v1s4BimkdfDWlh7gA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.22.4.tgz", + "integrity": "sha512-VXoK5UMrgECLYaMuGuVTOx5kcuap1Jm8g/M83RnCHBKOqvPPmROFJGQaZhGccnsFtfXQ3XYa4/jMCJvZnbJBdA==", "optional": true }, "@rollup/rollup-darwin-arm64": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.21.2.tgz", - "integrity": "sha512-99AhQ3/ZMxU7jw34Sq8brzXqWH/bMnf7ZVhvLk9QU2cOepbQSVTns6qoErJmSiAvU3InRqC2RRZ5ovh1KN0d0Q==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.22.4.tgz", + "integrity": 
"sha512-xMM9ORBqu81jyMKCDP+SZDhnX2QEVQzTcC6G18KlTQEzWK8r/oNZtKuZaCcHhnsa6fEeOBionoyl5JsAbE/36Q==", "optional": true }, "@rollup/rollup-darwin-x64": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.21.2.tgz", - "integrity": "sha512-ZbRaUvw2iN/y37x6dY50D8m2BnDbBjlnMPotDi/qITMJ4sIxNY33HArjikDyakhSv0+ybdUxhWxE6kTI4oX26w==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.22.4.tgz", + "integrity": "sha512-aJJyYKQwbHuhTUrjWjxEvGnNNBCnmpHDvrb8JFDbeSH3m2XdHcxDd3jthAzvmoI8w/kSjd2y0udT+4okADsZIw==", "optional": true }, "@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.21.2.tgz", - "integrity": "sha512-ztRJJMiE8nnU1YFcdbd9BcH6bGWG1z+jP+IPW2oDUAPxPjo9dverIOyXz76m6IPA6udEL12reYeLojzW2cYL7w==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.22.4.tgz", + "integrity": "sha512-j63YtCIRAzbO+gC2L9dWXRh5BFetsv0j0va0Wi9epXDgU/XUi5dJKo4USTttVyK7fGw2nPWK0PbAvyliz50SCQ==", "optional": true }, "@rollup/rollup-linux-arm-musleabihf": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.21.2.tgz", - "integrity": "sha512-flOcGHDZajGKYpLV0JNc0VFH361M7rnV1ee+NTeC/BQQ1/0pllYcFmxpagltANYt8FYf9+kL6RSk80Ziwyhr7w==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.22.4.tgz", + "integrity": "sha512-dJnWUgwWBX1YBRsuKKMOlXCzh2Wu1mlHzv20TpqEsfdZLb3WoJW2kIEsGwLkroYf24IrPAvOT/ZQ2OYMV6vlrg==", "optional": true }, "@rollup/rollup-linux-arm64-gnu": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.21.2.tgz", - "integrity": "sha512-69CF19Kp3TdMopyteO/LJbWufOzqqXzkrv4L2sP8kfMaAQ6iwky7NoXTp7bD6/irKgknDKM0P9E/1l5XxVQAhw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.22.4.tgz", + "integrity": "sha512-AdPRoNi3NKVLolCN/Sp4F4N1d98c4SBnHMKoLuiG6RXgoZ4sllseuGioszumnPGmPM2O7qaAX/IJdeDU8f26Aw==", "optional": true }, "@rollup/rollup-linux-arm64-musl": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.21.2.tgz", - "integrity": "sha512-48pD/fJkTiHAZTnZwR0VzHrao70/4MlzJrq0ZsILjLW/Ab/1XlVUStYyGt7tdyIiVSlGZbnliqmult/QGA2O2w==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.22.4.tgz", + "integrity": "sha512-Gl0AxBtDg8uoAn5CCqQDMqAx22Wx22pjDOjBdmG0VIWX3qUBHzYmOKh8KXHL4UpogfJ14G4wk16EQogF+v8hmA==", "optional": true }, "@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.21.2.tgz", - "integrity": "sha512-cZdyuInj0ofc7mAQpKcPR2a2iu4YM4FQfuUzCVA2u4HI95lCwzjoPtdWjdpDKyHxI0UO82bLDoOaLfpZ/wviyQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.22.4.tgz", + "integrity": "sha512-3aVCK9xfWW1oGQpTsYJJPF6bfpWfhbRnhdlyhak2ZiyFLDaayz0EP5j9V1RVLAAxlmWKTDfS9wyRyY3hvhPoOg==", "optional": true }, "@rollup/rollup-linux-riscv64-gnu": { - "version": "4.21.2", - 
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.21.2.tgz", - "integrity": "sha512-RL56JMT6NwQ0lXIQmMIWr1SW28z4E4pOhRRNqwWZeXpRlykRIlEpSWdsgNWJbYBEWD84eocjSGDu/XxbYeCmwg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.22.4.tgz", + "integrity": "sha512-ePYIir6VYnhgv2C5Xe9u+ico4t8sZWXschR6fMgoPUK31yQu7hTEJb7bCqivHECwIClJfKgE7zYsh1qTP3WHUA==", "optional": true }, "@rollup/rollup-linux-s390x-gnu": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.21.2.tgz", - "integrity": "sha512-PMxkrWS9z38bCr3rWvDFVGD6sFeZJw4iQlhrup7ReGmfn7Oukrr/zweLhYX6v2/8J6Cep9IEA/SmjXjCmSbrMQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.22.4.tgz", + "integrity": "sha512-GqFJ9wLlbB9daxhVlrTe61vJtEY99/xB3C8e4ULVsVfflcpmR6c8UZXjtkMA6FhNONhj2eA5Tk9uAVw5orEs4Q==", "optional": true }, "@rollup/rollup-linux-x64-gnu": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.21.2.tgz", - "integrity": "sha512-B90tYAUoLhU22olrafY3JQCFLnT3NglazdwkHyxNDYF/zAxJt5fJUB/yBoWFoIQ7SQj+KLe3iL4BhOMa9fzgpw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.22.4.tgz", + "integrity": "sha512-87v0ol2sH9GE3cLQLNEy0K/R0pz1nvg76o8M5nhMR0+Q+BBGLnb35P0fVz4CQxHYXaAOhE8HhlkaZfsdUOlHwg==", "optional": true }, "@rollup/rollup-linux-x64-musl": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.21.2.tgz", - "integrity": "sha512-7twFizNXudESmC9oneLGIUmoHiiLppz/Xs5uJQ4ShvE6234K0VB1/aJYU3f/4g7PhssLGKBVCC37uRkkOi8wjg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.22.4.tgz", + "integrity": "sha512-UV6FZMUgePDZrFjrNGIWzDo/vABebuXBhJEqrHxrGiU6HikPy0Z3LfdtciIttEUQfuDdCn8fqh7wiFJjCNwO+g==", "optional": true }, "@rollup/rollup-win32-arm64-msvc": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.21.2.tgz", - "integrity": "sha512-9rRero0E7qTeYf6+rFh3AErTNU1VCQg2mn7CQcI44vNUWM9Ze7MSRS/9RFuSsox+vstRt97+x3sOhEey024FRQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.22.4.tgz", + "integrity": "sha512-BjI+NVVEGAXjGWYHz/vv0pBqfGoUH0IGZ0cICTn7kB9PyjrATSkX+8WkguNjWoj2qSr1im/+tTGRaY+4/PdcQw==", "optional": true }, "@rollup/rollup-win32-ia32-msvc": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.21.2.tgz", - "integrity": "sha512-5rA4vjlqgrpbFVVHX3qkrCo/fZTj1q0Xxpg+Z7yIo3J2AilW7t2+n6Q8Jrx+4MrYpAnjttTYF8rr7bP46BPzRw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.22.4.tgz", + "integrity": "sha512-SiWG/1TuUdPvYmzmYnmd3IEifzR61Tragkbx9D3+R8mzQqDBz8v+BvZNDlkiTtI9T15KYZhP0ehn3Dld4n9J5g==", "optional": true }, "@rollup/rollup-win32-x64-msvc": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.21.2.tgz", - "integrity": "sha512-6UUxd0+SKomjdzuAcp+HAmxw1FlGBnl1v2yEPSabtx4lBfdXHDVsW7+lQkgz9cNFJGY3AWR7+V8P5BqkD9L9nA==", + "version": "4.22.4", + 
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.22.4.tgz", + "integrity": "sha512-j8pPKp53/lq9lMXN57S8cFz0MynJk8OWNuUnXct/9KCpKU7DgU3bYMJhwWmcqC0UU29p8Lr0/7KEVcaM6bf47Q==", "optional": true }, "@shikijs/core": { @@ -12470,26 +12470,26 @@ "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" }, "rollup": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.21.2.tgz", - "integrity": "sha512-e3TapAgYf9xjdLvKQCkQTnbTKd4a6jwlpQSJJFokHGaX2IVjoEqkIIhiQfqsi0cdwlOD+tQGuOd5AJkc5RngBw==", - "requires": { - "@rollup/rollup-android-arm-eabi": "4.21.2", - "@rollup/rollup-android-arm64": "4.21.2", - "@rollup/rollup-darwin-arm64": "4.21.2", - "@rollup/rollup-darwin-x64": "4.21.2", - "@rollup/rollup-linux-arm-gnueabihf": "4.21.2", - "@rollup/rollup-linux-arm-musleabihf": "4.21.2", - "@rollup/rollup-linux-arm64-gnu": "4.21.2", - "@rollup/rollup-linux-arm64-musl": "4.21.2", - "@rollup/rollup-linux-powerpc64le-gnu": "4.21.2", - "@rollup/rollup-linux-riscv64-gnu": "4.21.2", - "@rollup/rollup-linux-s390x-gnu": "4.21.2", - "@rollup/rollup-linux-x64-gnu": "4.21.2", - "@rollup/rollup-linux-x64-musl": "4.21.2", - "@rollup/rollup-win32-arm64-msvc": "4.21.2", - "@rollup/rollup-win32-ia32-msvc": "4.21.2", - "@rollup/rollup-win32-x64-msvc": "4.21.2", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.22.4.tgz", + "integrity": "sha512-vD8HJ5raRcWOyymsR6Z3o6+RzfEPCnVLMFJ6vRslO1jt4LO6dUo5Qnpg7y4RkZFM2DMe3WUirkI5c16onjrc6A==", + "requires": { + "@rollup/rollup-android-arm-eabi": "4.22.4", + "@rollup/rollup-android-arm64": "4.22.4", + "@rollup/rollup-darwin-arm64": "4.22.4", + "@rollup/rollup-darwin-x64": "4.22.4", + "@rollup/rollup-linux-arm-gnueabihf": "4.22.4", + "@rollup/rollup-linux-arm-musleabihf": "4.22.4", + "@rollup/rollup-linux-arm64-gnu": "4.22.4", + "@rollup/rollup-linux-arm64-musl": "4.22.4", + "@rollup/rollup-linux-powerpc64le-gnu": "4.22.4", + "@rollup/rollup-linux-riscv64-gnu": "4.22.4", + "@rollup/rollup-linux-s390x-gnu": "4.22.4", + "@rollup/rollup-linux-x64-gnu": "4.22.4", + "@rollup/rollup-linux-x64-musl": "4.22.4", + "@rollup/rollup-win32-arm64-msvc": "4.22.4", + "@rollup/rollup-win32-ia32-msvc": "4.22.4", + "@rollup/rollup-win32-x64-msvc": "4.22.4", "@types/estree": "1.0.5", "fsevents": "~2.3.2" } From d9fd12f86de497cba98d4e58565a9aaba54902a1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 09:15:54 +0200 Subject: [PATCH 13/20] chore(deps): bump com.github.luben:zstd-jni in /backend (#2866) Bumps [com.github.luben:zstd-jni](https://github.com/luben/zstd-jni) from 1.5.6-5 to 1.5.6-6. - [Commits](https://github.com/luben/zstd-jni/compare/v1.5.6-5...v1.5.6-6) --- updated-dependencies: - dependency-name: com.github.luben:zstd-jni dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- backend/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/build.gradle b/backend/build.gradle index a5408e99b..816304c0a 100644 --- a/backend/build.gradle +++ b/backend/build.gradle @@ -50,7 +50,7 @@ dependencies { implementation "org.springframework.boot:spring-boot-starter-security" implementation 'org.apache.commons:commons-compress:1.27.1' - implementation 'com.github.luben:zstd-jni:1.5.6-5' + implementation 'com.github.luben:zstd-jni:1.5.6-6' implementation 'org.tukaani:xz:1.10' implementation("org.redundent:kotlin-xml-builder:1.9.1") From c3e78779e7f03a7b4564d43590621d637b0db1ae Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 09:16:11 +0200 Subject: [PATCH 14/20] chore(deps): bump the npm_and_yarn group in /website with 3 updates (#2869) Bumps the npm_and_yarn group in /website with 3 updates: [rollup](https://github.com/rollup/rollup), [send](https://github.com/pillarjs/send) and [@astrojs/node](https://github.com/withastro/adapters/tree/HEAD/packages/node). Updates `rollup` from 4.21.0 to 4.22.4 - [Release notes](https://github.com/rollup/rollup/releases) - [Changelog](https://github.com/rollup/rollup/blob/master/CHANGELOG.md) - [Commits](https://github.com/rollup/rollup/compare/v4.21.0...v4.22.4) Updates `send` from 0.18.0 to 0.19.0 - [Release notes](https://github.com/pillarjs/send/releases) - [Changelog](https://github.com/pillarjs/send/blob/master/HISTORY.md) - [Commits](https://github.com/pillarjs/send/compare/0.18.0...0.19.0) Updates `@astrojs/node` from 8.3.3 to 8.3.4 - [Release notes](https://github.com/withastro/adapters/releases) - [Changelog](https://github.com/withastro/adapters/blob/main/packages/node/CHANGELOG.md) - [Commits](https://github.com/withastro/adapters/commits/@astrojs/node@8.3.4/packages/node) --- updated-dependencies: - dependency-name: rollup dependency-type: indirect dependency-group: npm_and_yarn - dependency-name: send dependency-type: indirect dependency-group: npm_and_yarn - dependency-name: "@astrojs/node" dependency-type: direct:production dependency-group: npm_and_yarn ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- website/package-lock.json | 195 ++++++++++++++++---------------------- website/package.json | 2 +- 2 files changed, 81 insertions(+), 116 deletions(-) diff --git a/website/package-lock.json b/website/package-lock.json index a7f6963f2..09f0a7a10 100644 --- a/website/package-lock.json +++ b/website/package-lock.json @@ -9,7 +9,7 @@ "version": "0.0.1", "dependencies": { "@astrojs/mdx": "^3.1.5", - "@astrojs/node": "^8.3.3", + "@astrojs/node": "^8.3.4", "@emotion/react": "^11.13.3", "@headlessui/react": "^2.1.7", "@mui/material": "~5.14.20", @@ -266,12 +266,11 @@ } }, "node_modules/@astrojs/node": { - "version": "8.3.3", - "resolved": "https://registry.npmjs.org/@astrojs/node/-/node-8.3.3.tgz", - "integrity": "sha512-idrKhnnPSi0ABV+PCQsRQqVNwpOvVDF/+fkwcIiE8sr9J8EMvW9g/oyAt8T4X2OBJ8FUzYPL8klfCdG7r0eB5g==", - "license": "MIT", + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@astrojs/node/-/node-8.3.4.tgz", + "integrity": "sha512-xzQs39goN7xh9np9rypGmbgZj3AmmjNxEMj9ZWz5aBERlqqFF3n8A/w/uaJeZ/bkHS60l1BXVS0tgsQt9MFqBA==", "dependencies": { - "send": "^0.18.0", + "send": "^0.19.0", "server-destroy": "^1.0.1" }, "peerDependencies": { @@ -2863,208 +2862,192 @@ "license": "MIT" }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.21.0.tgz", - "integrity": "sha512-WTWD8PfoSAJ+qL87lE7votj3syLavxunWhzCnx3XFxFiI/BA/r3X7MUM8dVrH8rb2r4AiO8jJsr3ZjdaftmnfA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.22.4.tgz", + "integrity": "sha512-Fxamp4aEZnfPOcGA8KSNEohV8hX7zVHOemC8jVBoBUHu5zpJK/Eu3uJwt6BMgy9fkvzxDaurgj96F/NiLukF2w==", "cpu": [ "arm" ], - "license": "MIT", "optional": true, "os": [ "android" ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.21.0.tgz", - "integrity": "sha512-a1sR2zSK1B4eYkiZu17ZUZhmUQcKjk2/j9Me2IDjk1GHW7LB5Z35LEzj9iJch6gtUfsnvZs1ZNyDW2oZSThrkA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.22.4.tgz", + "integrity": "sha512-VXoK5UMrgECLYaMuGuVTOx5kcuap1Jm8g/M83RnCHBKOqvPPmROFJGQaZhGccnsFtfXQ3XYa4/jMCJvZnbJBdA==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "android" ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.21.0.tgz", - "integrity": "sha512-zOnKWLgDld/svhKO5PD9ozmL6roy5OQ5T4ThvdYZLpiOhEGY+dp2NwUmxK0Ld91LrbjrvtNAE0ERBwjqhZTRAA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.22.4.tgz", + "integrity": "sha512-xMM9ORBqu81jyMKCDP+SZDhnX2QEVQzTcC6G18KlTQEzWK8r/oNZtKuZaCcHhnsa6fEeOBionoyl5JsAbE/36Q==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "darwin" ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.21.0.tgz", - "integrity": "sha512-7doS8br0xAkg48SKE2QNtMSFPFUlRdw9+votl27MvT46vo44ATBmdZdGysOevNELmZlfd+NEa0UYOA8f01WSrg==", + "version": "4.22.4", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.22.4.tgz", + "integrity": "sha512-aJJyYKQwbHuhTUrjWjxEvGnNNBCnmpHDvrb8JFDbeSH3m2XdHcxDd3jthAzvmoI8w/kSjd2y0udT+4okADsZIw==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "darwin" ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.21.0.tgz", - "integrity": "sha512-pWJsfQjNWNGsoCq53KjMtwdJDmh/6NubwQcz52aEwLEuvx08bzcy6tOUuawAOncPnxz/3siRtd8hiQ32G1y8VA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.22.4.tgz", + "integrity": "sha512-j63YtCIRAzbO+gC2L9dWXRh5BFetsv0j0va0Wi9epXDgU/XUi5dJKo4USTttVyK7fGw2nPWK0PbAvyliz50SCQ==", "cpu": [ "arm" ], - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.21.0.tgz", - "integrity": "sha512-efRIANsz3UHZrnZXuEvxS9LoCOWMGD1rweciD6uJQIx2myN3a8Im1FafZBzh7zk1RJ6oKcR16dU3UPldaKd83w==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.22.4.tgz", + "integrity": "sha512-dJnWUgwWBX1YBRsuKKMOlXCzh2Wu1mlHzv20TpqEsfdZLb3WoJW2kIEsGwLkroYf24IrPAvOT/ZQ2OYMV6vlrg==", "cpu": [ "arm" ], - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.21.0.tgz", - "integrity": "sha512-ZrPhydkTVhyeGTW94WJ8pnl1uroqVHM3j3hjdquwAcWnmivjAwOYjTEAuEDeJvGX7xv3Z9GAvrBkEzCgHq9U1w==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.22.4.tgz", + "integrity": "sha512-AdPRoNi3NKVLolCN/Sp4F4N1d98c4SBnHMKoLuiG6RXgoZ4sllseuGioszumnPGmPM2O7qaAX/IJdeDU8f26Aw==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.21.0.tgz", - "integrity": "sha512-cfaupqd+UEFeURmqNP2eEvXqgbSox/LHOyN9/d2pSdV8xTrjdg3NgOFJCtc1vQ/jEke1qD0IejbBfxleBPHnPw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.22.4.tgz", + "integrity": "sha512-Gl0AxBtDg8uoAn5CCqQDMqAx22Wx22pjDOjBdmG0VIWX3qUBHzYmOKh8KXHL4UpogfJ14G4wk16EQogF+v8hmA==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.21.0.tgz", - "integrity": "sha512-ZKPan1/RvAhrUylwBXC9t7B2hXdpb/ufeu22pG2psV7RN8roOfGurEghw1ySmX/CmDDHNTDDjY3lo9hRlgtaHg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.22.4.tgz", + "integrity": "sha512-3aVCK9xfWW1oGQpTsYJJPF6bfpWfhbRnhdlyhak2ZiyFLDaayz0EP5j9V1RVLAAxlmWKTDfS9wyRyY3hvhPoOg==", "cpu": [ "ppc64" ], - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.21.0", - 
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.21.0.tgz", - "integrity": "sha512-H1eRaCwd5E8eS8leiS+o/NqMdljkcb1d6r2h4fKSsCXQilLKArq6WS7XBLDu80Yz+nMqHVFDquwcVrQmGr28rg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.22.4.tgz", + "integrity": "sha512-ePYIir6VYnhgv2C5Xe9u+ico4t8sZWXschR6fMgoPUK31yQu7hTEJb7bCqivHECwIClJfKgE7zYsh1qTP3WHUA==", "cpu": [ "riscv64" ], - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.21.0.tgz", - "integrity": "sha512-zJ4hA+3b5tu8u7L58CCSI0A9N1vkfwPhWd/puGXwtZlsB5bTkwDNW/+JCU84+3QYmKpLi+XvHdmrlwUwDA6kqw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.22.4.tgz", + "integrity": "sha512-GqFJ9wLlbB9daxhVlrTe61vJtEY99/xB3C8e4ULVsVfflcpmR6c8UZXjtkMA6FhNONhj2eA5Tk9uAVw5orEs4Q==", "cpu": [ "s390x" ], - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.21.0.tgz", - "integrity": "sha512-e2hrvElFIh6kW/UNBQK/kzqMNY5mO+67YtEh9OA65RM5IJXYTWiXjX6fjIiPaqOkBthYF1EqgiZ6OXKcQsM0hg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.22.4.tgz", + "integrity": "sha512-87v0ol2sH9GE3cLQLNEy0K/R0pz1nvg76o8M5nhMR0+Q+BBGLnb35P0fVz4CQxHYXaAOhE8HhlkaZfsdUOlHwg==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.21.0.tgz", - "integrity": "sha512-1vvmgDdUSebVGXWX2lIcgRebqfQSff0hMEkLJyakQ9JQUbLDkEaMsPTLOmyccyC6IJ/l3FZuJbmrBw/u0A0uCQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.22.4.tgz", + "integrity": "sha512-UV6FZMUgePDZrFjrNGIWzDo/vABebuXBhJEqrHxrGiU6HikPy0Z3LfdtciIttEUQfuDdCn8fqh7wiFJjCNwO+g==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.21.0.tgz", - "integrity": "sha512-s5oFkZ/hFcrlAyBTONFY1TWndfyre1wOMwU+6KCpm/iatybvrRgmZVM+vCFwxmC5ZhdlgfE0N4XorsDpi7/4XQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.22.4.tgz", + "integrity": "sha512-BjI+NVVEGAXjGWYHz/vv0pBqfGoUH0IGZ0cICTn7kB9PyjrATSkX+8WkguNjWoj2qSr1im/+tTGRaY+4/PdcQw==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "win32" ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.21.0.tgz", - "integrity": "sha512-G9+TEqRnAA6nbpqyUqgTiopmnfgnMkR3kMukFBDsiyy23LZvUCpiUwjTRx6ezYCjJODXrh52rBR9oXvm+Fp5wg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.22.4.tgz", + "integrity": 
"sha512-SiWG/1TuUdPvYmzmYnmd3IEifzR61Tragkbx9D3+R8mzQqDBz8v+BvZNDlkiTtI9T15KYZhP0ehn3Dld4n9J5g==", "cpu": [ "ia32" ], - "license": "MIT", "optional": true, "os": [ "win32" ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.21.0.tgz", - "integrity": "sha512-2jsCDZwtQvRhejHLfZ1JY6w6kEuEtfF9nzYsZxzSlNVKDX+DpsDJ+Rbjkm74nvg2rdx0gwBS+IMdvwJuq3S9pQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.22.4.tgz", + "integrity": "sha512-j8pPKp53/lq9lMXN57S8cFz0MynJk8OWNuUnXct/9KCpKU7DgU3bYMJhwWmcqC0UU29p8Lr0/7KEVcaM6bf47Q==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "win32" @@ -6367,7 +6350,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "license": "MIT", "engines": { "node": ">= 0.8" } @@ -6385,7 +6367,6 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", - "license": "MIT", "engines": { "node": ">= 0.8", "npm": "1.2.8000 || >= 1.4.16" @@ -6540,8 +6521,7 @@ "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", - "license": "MIT" + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, "node_modules/electron-to-chromium": { "version": "1.5.12", @@ -6582,7 +6562,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", - "license": "MIT", "engines": { "node": ">= 0.8" } @@ -6851,8 +6830,7 @@ "node_modules/escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", - "license": "MIT" + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" }, "node_modules/escape-string-regexp": { "version": "4.0.0", @@ -7650,7 +7628,6 @@ "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", - "license": "MIT", "engines": { "node": ">= 0.6" } @@ -8007,7 +7984,6 @@ "version": "0.5.2", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", - "license": "MIT", "engines": { "node": ">= 0.6" } @@ -8751,7 +8727,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", - "license": "MIT", "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", @@ -11310,7 +11285,6 @@ "version": "1.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", "integrity": 
"sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "license": "MIT", "bin": { "mime": "cli.js" }, @@ -11887,7 +11861,6 @@ "version": "2.4.1", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "license": "MIT", "dependencies": { "ee-first": "1.1.1" }, @@ -12823,7 +12796,6 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", - "license": "MIT", "engines": { "node": ">= 0.6" } @@ -13404,10 +13376,9 @@ } }, "node_modules/rollup": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.21.0.tgz", - "integrity": "sha512-vo+S/lfA2lMS7rZ2Qoubi6I5hwZwzXeUIctILZLbHI+laNtvhhOIon2S1JksA5UEDQ7l3vberd0fxK44lTYjbQ==", - "license": "MIT", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.22.4.tgz", + "integrity": "sha512-vD8HJ5raRcWOyymsR6Z3o6+RzfEPCnVLMFJ6vRslO1jt4LO6dUo5Qnpg7y4RkZFM2DMe3WUirkI5c16onjrc6A==", "dependencies": { "@types/estree": "1.0.5" }, @@ -13419,22 +13390,22 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.21.0", - "@rollup/rollup-android-arm64": "4.21.0", - "@rollup/rollup-darwin-arm64": "4.21.0", - "@rollup/rollup-darwin-x64": "4.21.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.21.0", - "@rollup/rollup-linux-arm-musleabihf": "4.21.0", - "@rollup/rollup-linux-arm64-gnu": "4.21.0", - "@rollup/rollup-linux-arm64-musl": "4.21.0", - "@rollup/rollup-linux-powerpc64le-gnu": "4.21.0", - "@rollup/rollup-linux-riscv64-gnu": "4.21.0", - "@rollup/rollup-linux-s390x-gnu": "4.21.0", - "@rollup/rollup-linux-x64-gnu": "4.21.0", - "@rollup/rollup-linux-x64-musl": "4.21.0", - "@rollup/rollup-win32-arm64-msvc": "4.21.0", - "@rollup/rollup-win32-ia32-msvc": "4.21.0", - "@rollup/rollup-win32-x64-msvc": "4.21.0", + "@rollup/rollup-android-arm-eabi": "4.22.4", + "@rollup/rollup-android-arm64": "4.22.4", + "@rollup/rollup-darwin-arm64": "4.22.4", + "@rollup/rollup-darwin-x64": "4.22.4", + "@rollup/rollup-linux-arm-gnueabihf": "4.22.4", + "@rollup/rollup-linux-arm-musleabihf": "4.22.4", + "@rollup/rollup-linux-arm64-gnu": "4.22.4", + "@rollup/rollup-linux-arm64-musl": "4.22.4", + "@rollup/rollup-linux-powerpc64le-gnu": "4.22.4", + "@rollup/rollup-linux-riscv64-gnu": "4.22.4", + "@rollup/rollup-linux-s390x-gnu": "4.22.4", + "@rollup/rollup-linux-x64-gnu": "4.22.4", + "@rollup/rollup-linux-x64-musl": "4.22.4", + "@rollup/rollup-win32-arm64-msvc": "4.22.4", + "@rollup/rollup-win32-ia32-msvc": "4.22.4", + "@rollup/rollup-win32-x64-msvc": "4.22.4", "fsevents": "~2.3.2" } }, @@ -13680,10 +13651,9 @@ } }, "node_modules/send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", - "license": "MIT", + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", "dependencies": { "debug": "2.6.9", "depd": "2.0.0", @@ -13707,7 +13677,6 @@ "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": 
"sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", "dependencies": { "ms": "2.0.0" } @@ -13715,14 +13684,12 @@ "node_modules/send/node_modules/debug/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/send/node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "license": "MIT" + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "node_modules/server-destroy": { "version": "1.0.1", @@ -13785,8 +13752,7 @@ "node_modules/setprototypeof": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", - "license": "ISC" + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "node_modules/sharp": { "version": "0.33.5", @@ -14630,7 +14596,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", - "license": "MIT", "engines": { "node": ">=0.6" } diff --git a/website/package.json b/website/package.json index 403ef54f3..756d34e4e 100644 --- a/website/package.json +++ b/website/package.json @@ -22,7 +22,7 @@ }, "dependencies": { "@astrojs/mdx": "^3.1.5", - "@astrojs/node": "^8.3.3", + "@astrojs/node": "^8.3.4", "@emotion/react": "^11.13.3", "@headlessui/react": "^2.1.7", "@mui/material": "~5.14.20", From abaa6dbf2564245dd86696c5f26d0cc4d6865a66 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 09:25:15 +0200 Subject: [PATCH 15/20] chore(deps): bump rollup (#2870) Bumps the npm_and_yarn group in /keycloak/keycloakify with 1 update: [rollup](https://github.com/rollup/rollup). Updates `rollup` from 4.21.2 to 4.22.4 - [Release notes](https://github.com/rollup/rollup/releases) - [Changelog](https://github.com/rollup/rollup/blob/master/CHANGELOG.md) - [Commits](https://github.com/rollup/rollup/compare/v4.21.2...v4.22.4) --- updated-dependencies: - dependency-name: rollup dependency-type: indirect dependency-group: npm_and_yarn ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- keycloak/keycloakify/yarn.lock | 136 ++++++++++++++++----------------- 1 file changed, 68 insertions(+), 68 deletions(-) diff --git a/keycloak/keycloakify/yarn.lock b/keycloak/keycloakify/yarn.lock index e07ee9a3b..eee966b94 100644 --- a/keycloak/keycloakify/yarn.lock +++ b/keycloak/keycloakify/yarn.lock @@ -1456,114 +1456,114 @@ __metadata: languageName: node linkType: hard -"@rollup/rollup-android-arm-eabi@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-android-arm-eabi@npm:4.21.2" +"@rollup/rollup-android-arm-eabi@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-android-arm-eabi@npm:4.22.4" conditions: os=android & cpu=arm languageName: node linkType: hard -"@rollup/rollup-android-arm64@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-android-arm64@npm:4.21.2" +"@rollup/rollup-android-arm64@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-android-arm64@npm:4.22.4" conditions: os=android & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-darwin-arm64@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-darwin-arm64@npm:4.21.2" +"@rollup/rollup-darwin-arm64@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-darwin-arm64@npm:4.22.4" conditions: os=darwin & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-darwin-x64@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-darwin-x64@npm:4.21.2" +"@rollup/rollup-darwin-x64@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-darwin-x64@npm:4.22.4" conditions: os=darwin & cpu=x64 languageName: node linkType: hard -"@rollup/rollup-linux-arm-gnueabihf@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.21.2" +"@rollup/rollup-linux-arm-gnueabihf@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.22.4" conditions: os=linux & cpu=arm & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-arm-musleabihf@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.21.2" +"@rollup/rollup-linux-arm-musleabihf@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.22.4" conditions: os=linux & cpu=arm & libc=musl languageName: node linkType: hard -"@rollup/rollup-linux-arm64-gnu@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.21.2" +"@rollup/rollup-linux-arm64-gnu@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.22.4" conditions: os=linux & cpu=arm64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-arm64-musl@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-arm64-musl@npm:4.21.2" +"@rollup/rollup-linux-arm64-musl@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-linux-arm64-musl@npm:4.22.4" conditions: os=linux & cpu=arm64 & libc=musl languageName: node linkType: hard -"@rollup/rollup-linux-powerpc64le-gnu@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-powerpc64le-gnu@npm:4.21.2" +"@rollup/rollup-linux-powerpc64le-gnu@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-linux-powerpc64le-gnu@npm:4.22.4" conditions: os=linux & cpu=ppc64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-riscv64-gnu@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.21.2" 
+"@rollup/rollup-linux-riscv64-gnu@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.22.4" conditions: os=linux & cpu=riscv64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-s390x-gnu@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.21.2" +"@rollup/rollup-linux-s390x-gnu@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.22.4" conditions: os=linux & cpu=s390x & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-x64-gnu@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-x64-gnu@npm:4.21.2" +"@rollup/rollup-linux-x64-gnu@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-linux-x64-gnu@npm:4.22.4" conditions: os=linux & cpu=x64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-x64-musl@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-x64-musl@npm:4.21.2" +"@rollup/rollup-linux-x64-musl@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-linux-x64-musl@npm:4.22.4" conditions: os=linux & cpu=x64 & libc=musl languageName: node linkType: hard -"@rollup/rollup-win32-arm64-msvc@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.21.2" +"@rollup/rollup-win32-arm64-msvc@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.22.4" conditions: os=win32 & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-win32-ia32-msvc@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.21.2" +"@rollup/rollup-win32-ia32-msvc@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.22.4" conditions: os=win32 & cpu=ia32 languageName: node linkType: hard -"@rollup/rollup-win32-x64-msvc@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-win32-x64-msvc@npm:4.21.2" +"@rollup/rollup-win32-x64-msvc@npm:4.22.4": + version: 4.22.4 + resolution: "@rollup/rollup-win32-x64-msvc@npm:4.22.4" conditions: os=win32 & cpu=x64 languageName: node linkType: hard @@ -7475,25 +7475,25 @@ __metadata: linkType: hard "rollup@npm:^4.20.0": - version: 4.21.2 - resolution: "rollup@npm:4.21.2" - dependencies: - "@rollup/rollup-android-arm-eabi": "npm:4.21.2" - "@rollup/rollup-android-arm64": "npm:4.21.2" - "@rollup/rollup-darwin-arm64": "npm:4.21.2" - "@rollup/rollup-darwin-x64": "npm:4.21.2" - "@rollup/rollup-linux-arm-gnueabihf": "npm:4.21.2" - "@rollup/rollup-linux-arm-musleabihf": "npm:4.21.2" - "@rollup/rollup-linux-arm64-gnu": "npm:4.21.2" - "@rollup/rollup-linux-arm64-musl": "npm:4.21.2" - "@rollup/rollup-linux-powerpc64le-gnu": "npm:4.21.2" - "@rollup/rollup-linux-riscv64-gnu": "npm:4.21.2" - "@rollup/rollup-linux-s390x-gnu": "npm:4.21.2" - "@rollup/rollup-linux-x64-gnu": "npm:4.21.2" - "@rollup/rollup-linux-x64-musl": "npm:4.21.2" - "@rollup/rollup-win32-arm64-msvc": "npm:4.21.2" - "@rollup/rollup-win32-ia32-msvc": "npm:4.21.2" - "@rollup/rollup-win32-x64-msvc": "npm:4.21.2" + version: 4.22.4 + resolution: "rollup@npm:4.22.4" + dependencies: + "@rollup/rollup-android-arm-eabi": "npm:4.22.4" + "@rollup/rollup-android-arm64": "npm:4.22.4" + "@rollup/rollup-darwin-arm64": "npm:4.22.4" + "@rollup/rollup-darwin-x64": "npm:4.22.4" + "@rollup/rollup-linux-arm-gnueabihf": "npm:4.22.4" + "@rollup/rollup-linux-arm-musleabihf": "npm:4.22.4" + "@rollup/rollup-linux-arm64-gnu": "npm:4.22.4" + "@rollup/rollup-linux-arm64-musl": "npm:4.22.4" + "@rollup/rollup-linux-powerpc64le-gnu": "npm:4.22.4" + 
"@rollup/rollup-linux-riscv64-gnu": "npm:4.22.4" + "@rollup/rollup-linux-s390x-gnu": "npm:4.22.4" + "@rollup/rollup-linux-x64-gnu": "npm:4.22.4" + "@rollup/rollup-linux-x64-musl": "npm:4.22.4" + "@rollup/rollup-win32-arm64-msvc": "npm:4.22.4" + "@rollup/rollup-win32-ia32-msvc": "npm:4.22.4" + "@rollup/rollup-win32-x64-msvc": "npm:4.22.4" "@types/estree": "npm:1.0.5" fsevents: "npm:~2.3.2" dependenciesMeta: @@ -7533,7 +7533,7 @@ __metadata: optional: true bin: rollup: dist/bin/rollup - checksum: 10c0/c9d97f7a21cde110371b2e890a31a996fee09b81e639e79372b962a9638ae653d2d24186b94632fc5dfab8a0582e1d0639dfe34b8b75051facd86915a9585a5f + checksum: 10c0/4c96b6e2e0c5dbe73b4ba899cea894a05115ab8c65ccff631fbbb944e2b3a9f2eb3b99c2dce3dd91b179647df1892ffc44ecee29381ccf155ba8000b22712a32 languageName: node linkType: hard From 37f4ab8aeb4913e492c0e4352c17e9f201817f61 Mon Sep 17 00:00:00 2001 From: Cornelius Roemer Date: Tue, 24 Sep 2024 13:43:09 +0200 Subject: [PATCH 16/20] feat(backend,prepro): use etag to reduce database load (#2768) --------- Co-authored-by: Anna (Anya) Parker <50943381+anna-parker@users.noreply.github.com> --- .../controller/SubmissionController.kt | 18 ++++++ .../SubmissionControllerDescriptions.kt | 2 + .../ExtractUnprocessedDataEndpointTest.kt | 32 ++++++++++ .../submission/SubmissionControllerClient.kt | 15 +++-- kubernetes/loculus/silo_import_job.sh | 5 +- preprocessing/dummy/main.py | 58 ++++++++++--------- preprocessing/nextclade/environment.yml | 3 +- .../src/loculus_preprocessing/backend.py | 58 +++++++++++++++---- .../src/loculus_preprocessing/datatypes.py | 4 +- .../src/loculus_preprocessing/prepro.py | 44 ++++++-------- .../processing_functions.py | 4 +- 11 files changed, 167 insertions(+), 76 deletions(-) diff --git a/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionController.kt b/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionController.kt index 55ab97b55..6a7dab09c 100644 --- a/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionController.kt +++ b/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionController.kt @@ -133,6 +133,19 @@ open class SubmissionController( schema = Schema(implementation = UnprocessedData::class), ), ], + headers = [ + Header( + name = "eTag", + description = "Last database write Etag", + schema = Schema(type = "integer"), + ), + ], + ) + @ApiResponse( + responseCode = "304", + description = + "No database changes since last request " + + "(Etag in HttpHeaders.IF_NONE_MATCH matches lastDatabaseWriteETag)", ) @ApiResponse(responseCode = "422", description = EXTRACT_UNPROCESSED_DATA_ERROR_RESPONSE) @PostMapping("/extract-unprocessed-data", produces = [MediaType.APPLICATION_NDJSON_VALUE]) @@ -143,6 +156,7 @@ open class SubmissionController( message = "You can extract at max $MAX_EXTRACTED_SEQUENCE_ENTRIES sequence entries at once.", ) numberOfSequenceEntries: Int, @RequestParam pipelineVersion: Long, + @RequestHeader(value = HttpHeaders.IF_NONE_MATCH, required = false) ifNoneMatch: String?, ): ResponseEntity { val currentProcessingPipelineVersion = submissionDatabaseService.getCurrentProcessingPipelineVersion() if (pipelineVersion < currentProcessingPipelineVersion) { @@ -152,8 +166,12 @@ open class SubmissionController( ) } + val lastDatabaseWriteETag = releasedDataModel.getLastDatabaseWriteETag() + if (ifNoneMatch == lastDatabaseWriteETag) return ResponseEntity.status(HttpStatus.NOT_MODIFIED).build() + val headers = HttpHeaders() headers.contentType = 
MediaType.parseMediaType(MediaType.APPLICATION_NDJSON_VALUE) + headers.eTag = lastDatabaseWriteETag val streamBody = streamTransactioned { submissionDatabaseService.streamUnprocessedSubmissions(numberOfSequenceEntries, organism, pipelineVersion) } diff --git a/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionControllerDescriptions.kt b/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionControllerDescriptions.kt index 0f0be7b72..49ec4eb96 100644 --- a/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionControllerDescriptions.kt +++ b/backend/src/main/kotlin/org/loculus/backend/controller/SubmissionControllerDescriptions.kt @@ -146,6 +146,8 @@ and roll back the whole transaction. const val GET_RELEASED_DATA_DESCRIPTION = """ Get released data as a stream of NDJSON. This returns all accession versions that have the status 'APPROVED_FOR_RELEASE'. +Optionally submit the etag received in previous request with If-None-Match +to only retrieve all released data if the database has changed since last request. """ const val GET_RELEASED_DATA_RESPONSE_DESCRIPTION = """ diff --git a/backend/src/test/kotlin/org/loculus/backend/controller/submission/ExtractUnprocessedDataEndpointTest.kt b/backend/src/test/kotlin/org/loculus/backend/controller/submission/ExtractUnprocessedDataEndpointTest.kt index 72fafc1a5..52bbfa62c 100644 --- a/backend/src/test/kotlin/org/loculus/backend/controller/submission/ExtractUnprocessedDataEndpointTest.kt +++ b/backend/src/test/kotlin/org/loculus/backend/controller/submission/ExtractUnprocessedDataEndpointTest.kt @@ -27,7 +27,10 @@ import org.loculus.backend.controller.expectUnauthorizedResponse import org.loculus.backend.controller.getAccessionVersions import org.loculus.backend.controller.jwtForDefaultUser import org.loculus.backend.controller.submission.SubmitFiles.DefaultFiles +import org.loculus.backend.controller.submission.SubmitFiles.DefaultFiles.NUMBER_OF_SEQUENCES import org.springframework.beans.factory.annotation.Autowired +import org.springframework.http.HttpHeaders.ETAG +import org.springframework.test.web.servlet.result.MockMvcResultMatchers.header import org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath import org.springframework.test.web.servlet.result.MockMvcResultMatchers.status @@ -67,6 +70,35 @@ class ExtractUnprocessedDataEndpointTest( assertThat(responseBody, `is`(emptyList())) } + @Test + fun `GIVEN header etag equal etag from last db update THEN respond with 304, ELSE respond with data and etag`() { + val submissionResult = convenienceClient.submitDefaultFiles() + val response = client.extractUnprocessedData(DefaultFiles.NUMBER_OF_SEQUENCES) + + val initialEtag = response.andReturn().response.getHeader(ETAG) + + val responseBody = response.expectNdjsonAndGetContent() + assertThat(responseBody.size, `is`(DefaultFiles.NUMBER_OF_SEQUENCES)) + + val responseAfterUpdatingTable = client.extractUnprocessedData( + DefaultFiles.NUMBER_OF_SEQUENCES, + ifNoneMatch = initialEtag, + ).andExpect(status().isOk) + + val emptyResponseBody = responseAfterUpdatingTable.expectNdjsonAndGetContent() + assertThat(emptyResponseBody.size, `is`(0)) + + val secondEtag = responseAfterUpdatingTable.andReturn().response.getHeader(ETAG) + + val responseNoNewData = client.extractUnprocessedData( + DefaultFiles.NUMBER_OF_SEQUENCES, + ifNoneMatch = secondEtag, + ) + + responseNoNewData.andExpect(status().isNotModified) + .andExpect(header().doesNotExist(ETAG)) + } + @Test fun `WHEN extracting unprocessed data THEN only 
previously not extracted sequence entries are returned`() { val submissionResult = convenienceClient.submitDefaultFiles() diff --git a/backend/src/test/kotlin/org/loculus/backend/controller/submission/SubmissionControllerClient.kt b/backend/src/test/kotlin/org/loculus/backend/controller/submission/SubmissionControllerClient.kt index 716ddc880..942771d32 100644 --- a/backend/src/test/kotlin/org/loculus/backend/controller/submission/SubmissionControllerClient.kt +++ b/backend/src/test/kotlin/org/loculus/backend/controller/submission/SubmissionControllerClient.kt @@ -58,13 +58,20 @@ class SubmissionControllerClient(private val mockMvc: MockMvc, private val objec numberOfSequenceEntries: Int, organism: String = DEFAULT_ORGANISM, pipelineVersion: Long = DEFAULT_PIPELINE_VERSION, + ifNoneMatch: String? = null, jwt: String? = jwtForProcessingPipeline, - ): ResultActions = mockMvc.perform( - post(addOrganismToPath("/extract-unprocessed-data", organism = organism)) + ): ResultActions { + val requestBuilder = post(addOrganismToPath("/extract-unprocessed-data", organism = organism)) .withAuth(jwt) .param("numberOfSequenceEntries", numberOfSequenceEntries.toString()) - .param("pipelineVersion", pipelineVersion.toString()), - ) + .param("pipelineVersion", pipelineVersion.toString()) + + if (ifNoneMatch != null) { + requestBuilder.header("If-None-Match", ifNoneMatch) + } + + return mockMvc.perform(requestBuilder) + } fun submitProcessedData( vararg submittedProcessedData: SubmittedProcessedData, diff --git a/kubernetes/loculus/silo_import_job.sh b/kubernetes/loculus/silo_import_job.sh index 36796c235..c4c30396f 100755 --- a/kubernetes/loculus/silo_import_job.sh +++ b/kubernetes/loculus/silo_import_job.sh @@ -85,7 +85,6 @@ download_data() { http_status_code=$(curl -o "$new_input_data_path" --fail-with-body "$released_data_endpoint" -H "If-None-Match: $last_etag" -D "$new_input_header_path" -w "%{http_code}") exit_code=$? set -e - echo "Release data request returned with http status code: $http_status_code" if [ "$http_status_code" -eq 304 ]; then echo "State in Loculus backend has not changed: HTTP 304 Not Modified." @@ -109,8 +108,8 @@ download_data() { expected_record_count=$(grep -i '^x-total-records:' "$new_input_header_path" | awk '{print $2}' | tr -d '[:space:]') echo "Response should contain a total of : $expected_record_count records" - # jq validates each individual json object, to catch truncated lines - true_record_count=$(zstd -d -c "$new_input_data_path" | jq -c . | wc -l | tr -d '[:space:]') + # jq validates each individual json object, to catch truncated lines + true_record_count=$(zstd -d -c "$new_input_data_path" | jq -c . 
| wc -l | tr -d '[:space:]') echo "Response contained a total of : $true_record_count records" if [ "$true_record_count" -ne "$expected_record_count" ]; then diff --git a/preprocessing/dummy/main.py b/preprocessing/dummy/main.py index 9958a2d76..886e1b59d 100644 --- a/preprocessing/dummy/main.py +++ b/preprocessing/dummy/main.py @@ -87,21 +87,28 @@ class Sequence: ) -def fetch_unprocessed_sequences(n: int) -> List[Sequence]: +def fetch_unprocessed_sequences(etag: str | None, n: int) -> tuple[str | None, List[Sequence]]: url = backendHost + "/extract-unprocessed-data" params = {"numberOfSequenceEntries": n, "pipelineVersion": pipeline_version} - headers = {"Authorization": "Bearer " + get_jwt()} + headers = { + "Authorization": "Bearer " + get_jwt(), + **({"If-None-Match": etag} if etag else {}), + } response = requests.post(url, data=params, headers=headers) - if not response.ok: - if response.status_code == 422: - logging.debug("{}. Sleeping for a while.".format(response.text)) + match response.status_code: + case 200: + return response.headers.get("ETag"), parse_ndjson(response.text) + case 304: + return etag, [] + case 422: + logging.debug(f"{response.text}. Sleeping for a while.") time.sleep(60 * 10) - return [] - raise Exception( - "Fetching unprocessed data failed. Status code: {}".format(response.status_code), - response.text, - ) - return parse_ndjson(response.text) + return None, [] + case _: + raise Exception( + f"Fetching unprocessed data failed. Status code: {response.status_code}", + response.text, + ) def parse_ndjson(ndjson_data: str) -> List[Sequence]: @@ -181,7 +188,7 @@ def submit_processed_sequences(processed: List[Sequence]): response = requests.post(url, data=ndjson_string, headers=headers) if not response.ok: raise Exception( - "Submitting processed data failed. Status code: {}".format(response.status_code), + f"Submitting processed data failed. Status code: {response.status_code}", response.text, ) @@ -196,37 +203,36 @@ def get_jwt(): } response = requests.post(url, data=data) if not response.ok: - raise Exception( - "Fetching JWT failed. Status code: {}".format(response.status_code), response.text - ) + raise Exception(f"Fetching JWT failed. Status code: {response.status_code}", response.text) return response.json()["access_token"] def main(): total_processed = 0 locally_processed = 0 + etag = None + last_force_refresh = time.time() if watch_mode: logging.debug("Started in watch mode - waiting 10 seconds before fetching data.") time.sleep(10) - if args.maxSequences and args.maxSequences < 100: - sequences_to_fetch = args.maxSequences - else: - sequences_to_fetch = 100 + sequences_to_fetch = args.maxSequences if args.maxSequences and args.maxSequences < 100 else 100 while True: - unprocessed = fetch_unprocessed_sequences(sequences_to_fetch) + if last_force_refresh + 3600 < time.time(): + etag = None + last_force_refresh = time.time() + + etag, unprocessed = fetch_unprocessed_sequences(etag, sequences_to_fetch) if len(unprocessed) == 0: if watch_mode: - logging.debug( - "Processed {} sequences. Sleeping for 10 seconds.".format(locally_processed) - ) + logging.debug(f"Processed {locally_processed} sequences. 
Sleeping for 10 seconds.") time.sleep(2) locally_processed = 0 continue - else: - break + break + etag = None processed = process(unprocessed) submit_processed_sequences(processed) total_processed += len(processed) @@ -234,7 +240,7 @@ def main(): if args.maxSequences and total_processed >= args.maxSequences: break - logging.debug("Total processed sequences: {}".format(total_processed)) + logging.debug(f"Total processed sequences: {total_processed}") if __name__ == "__main__": diff --git a/preprocessing/nextclade/environment.yml b/preprocessing/nextclade/environment.yml index 131ac1446..a1e9811b4 100644 --- a/preprocessing/nextclade/environment.yml +++ b/preprocessing/nextclade/environment.yml @@ -2,11 +2,12 @@ name: loculus-nextclade channels: - conda-forge - bioconda + - nodefaults dependencies: - python=3.12 - biopython=1.83 - dpath=2.1 - - nextclade=3.5 + - nextclade=3.8 - pip=24.0 - PyYAML=6.0 - pyjwt=2.8 diff --git a/preprocessing/nextclade/src/loculus_preprocessing/backend.py b/preprocessing/nextclade/src/loculus_preprocessing/backend.py index 8a6028b56..9dbdc4883 100644 --- a/preprocessing/nextclade/src/loculus_preprocessing/backend.py +++ b/preprocessing/nextclade/src/loculus_preprocessing/backend.py @@ -16,6 +16,8 @@ from .config import Config from .datatypes import ( ProcessedEntry, + UnprocessedData, + UnprocessedEntry, ) @@ -66,24 +68,56 @@ def get_jwt(config: Config) -> str: raise Exception(error_msg) -def fetch_unprocessed_sequences(n: int, config: Config) -> str: +def parse_ndjson(ndjson_data: str) -> Sequence[UnprocessedEntry]: + entries = [] + for json_str in ndjson_data.split("\n"): + if len(json_str) == 0: + continue + # Loculus currently cannot handle non-breaking spaces. + json_str_processed = json_str.replace("\N{NO-BREAK SPACE}", " ") + json_object = json.loads(json_str_processed) + unprocessed_data = UnprocessedData( + submitter=json_object["submitter"], + metadata=json_object["data"]["metadata"], + unalignedNucleotideSequences=json_object["data"]["unalignedNucleotideSequences"], + ) + entry = UnprocessedEntry( + accessionVersion=f"{json_object['accession']}.{ + json_object['version']}", + data=unprocessed_data, + ) + entries.append(entry) + return entries + + +def fetch_unprocessed_sequences( + etag: str | None, config: Config +) -> tuple[str | None, Sequence[UnprocessedEntry] | None]: + n = config.batch_size url = config.backend_host.rstrip("/") + "/extract-unprocessed-data" logging.debug(f"Fetching {n} unprocessed sequences from {url}") params = {"numberOfSequenceEntries": n, "pipelineVersion": config.pipeline_version} - headers = {"Authorization": "Bearer " + get_jwt(config)} + headers = { + "Authorization": "Bearer " + get_jwt(config), + **({"If-None-Match": etag} if etag else {}), + } + logging.debug(f"Requesting data with ETag: {etag}") response = requests.post(url, data=params, headers=headers, timeout=10) - if not response.ok: - if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: + match response.status_code: + case HTTPStatus.NOT_MODIFIED: + return etag, None + case HTTPStatus.OK: + return response.headers["ETag"], parse_ndjson(response.text) + case HTTPStatus.UNPROCESSABLE_ENTITY: logging.debug(f"{response.text}.\nSleeping for a while.") time.sleep(60 * 1) - return "" - msg = f"Fetching unprocessed data failed. Status code: { - response.status_code}" - raise Exception( - msg, - response.text, - ) - return response.text + return None, None + case _: + msg = f"Fetching unprocessed data failed. 
Status code: {response.status_code}" + raise Exception( + msg, + response.text, + ) def submit_processed_sequences( diff --git a/preprocessing/nextclade/src/loculus_preprocessing/datatypes.py b/preprocessing/nextclade/src/loculus_preprocessing/datatypes.py index 8acebb6a9..215028624 100644 --- a/preprocessing/nextclade/src/loculus_preprocessing/datatypes.py +++ b/preprocessing/nextclade/src/loculus_preprocessing/datatypes.py @@ -1,7 +1,7 @@ # ruff: noqa: N815 from dataclasses import dataclass, field from enum import StrEnum, unique -from typing import List, Tuple, Any +from typing import Any AccessionVersion = str GeneName = str @@ -37,7 +37,7 @@ def __hash__(self): @dataclass(frozen=True) class ProcessingAnnotation: - source: Tuple[AnnotationSource, ...] + source: tuple[AnnotationSource, ...] message: str def __post_init__(self): diff --git a/preprocessing/nextclade/src/loculus_preprocessing/prepro.py b/preprocessing/nextclade/src/loculus_preprocessing/prepro.py index 715105b88..2c6474e75 100644 --- a/preprocessing/nextclade/src/loculus_preprocessing/prepro.py +++ b/preprocessing/nextclade/src/loculus_preprocessing/prepro.py @@ -52,28 +52,6 @@ # Functions related to reading and writing files -def parse_ndjson(ndjson_data: str) -> Sequence[UnprocessedEntry]: - entries = [] - for json_str in ndjson_data.split("\n"): - if len(json_str) == 0: - continue - # Loculus currently cannot handle non-breaking spaces. - json_str_processed = json_str.replace("\N{NO-BREAK SPACE}", " ") - json_object = json.loads(json_str_processed) - unprocessed_data = UnprocessedData( - submitter=json_object["submitter"], - metadata=json_object["data"]["metadata"], - unalignedNucleotideSequences=json_object["data"]["unalignedNucleotideSequences"], - ) - entry = UnprocessedEntry( - accessionVersion=f"{json_object['accession']}.{ - json_object['version']}", - data=unprocessed_data, - ) - entries.append(entry) - return entries - - def parse_nextclade_tsv( amino_acid_insertions: defaultdict[ AccessionVersion, defaultdict[GeneName, list[AminoAcidInsertion]] @@ -725,17 +703,29 @@ def run(config: Config) -> None: if config.nextclade_dataset_name: download_nextclade_dataset(dataset_dir, config) total_processed = 0 + etag = None + last_force_refresh = time.time() while True: logging.debug("Fetching unprocessed sequences") - unprocessed = parse_ndjson(fetch_unprocessed_sequences(config.batch_size, config)) - if len(unprocessed) == 0: + # Reset etag every hour just in case + if last_force_refresh + 3600 < time.time(): + etag = None + last_force_refresh = time.time() + etag, unprocessed = fetch_unprocessed_sequences(etag, config) + if not unprocessed: # sleep 1 sec and try again logging.debug("No unprocessed sequences found. Sleeping for 1 second.") time.sleep(1) continue - # Process the sequences, get result as dictionary - processed = process_all(unprocessed, dataset_dir, config) - # Submit the result + # Don't use etag if we just got data, preprocessing only asks for 100 sequences to process at a time, so there might be more + etag = None + try: + processed = process_all(unprocessed, dataset_dir, config) + except Exception as e: + logging.exception( + f"Processing failed. Traceback : {e}. 
Unprocessed data: {unprocessed}" + ) + continue try: submit_processed_sequences(processed, dataset_dir, config) except RuntimeError as e: diff --git a/preprocessing/nextclade/src/loculus_preprocessing/processing_functions.py b/preprocessing/nextclade/src/loculus_preprocessing/processing_functions.py index aeed9d114..780764d66 100644 --- a/preprocessing/nextclade/src/loculus_preprocessing/processing_functions.py +++ b/preprocessing/nextclade/src/loculus_preprocessing/processing_functions.py @@ -423,7 +423,9 @@ def identity( warnings.append( ProcessingAnnotation( source=[ - AnnotationSource(name=output_field, type=AnnotationSourceType.METADATA) + AnnotationSource( + name=output_field, type=AnnotationSourceType.METADATA + ) ], message=f"Invalid boolean value: {input_datum}. Defaulting to null.", ) From b264449917ace729ccbe647841a0efecd4ff4d52 Mon Sep 17 00:00:00 2001 From: "Anna (Anya) Parker" <50943381+anna-parker@users.noreply.github.com> Date: Tue, 24 Sep 2024 15:06:34 +0200 Subject: [PATCH 17/20] feat(ena-submission): Add INSDC accession to results of ena submission and upload to backend. (#2845) * Add INSDC accession base and accession full to results of ena submission and upload to backend. * Update readme with more tips on local testing and warning * Refactor to make test a flag and local testing clearer. * Wait longer when retrying if get_group_info fails. --- ena-submission/README.md | 108 ++++++++++++++++-- ena-submission/Snakefile | 29 ++++- ena-submission/config/defaults.yaml | 4 +- ena-submission/scripts/create_assembly.py | 73 +++++++++--- ena-submission/scripts/create_project.py | 22 +++- ena-submission/scripts/create_sample.py | 24 ++-- .../scripts/ena_submission_helper.py | 59 ++++++++-- .../upload_external_metadata_to_loculus.py | 13 +++ 8 files changed, 277 insertions(+), 55 deletions(-) diff --git a/ena-submission/README.md b/ena-submission/README.md index aaf488a6c..2c55fdc54 100644 --- a/ena-submission/README.md +++ b/ena-submission/README.md @@ -151,6 +151,10 @@ Then run snakemake using `snakemake` or `snakemake {rule}`. ## Testing +> [!WARNING] +> When testing always submit to ENA's test/dev instance. This means for XML post requests (i.e. for project and sample creation), sending them to `https://wwwdev.ebi.ac.uk/ena` and for webin-cli requests (i.e. assembly creation) adding the `-test` flag. This is done automatically when the `submit_to_ena_prod` is set to False (which is the default). Do not change this flag locally unless you know what you are doing. +> Using our ENA test account does **not** affect which ENA instance you submit to, if you use our test account and submit to ENA production you will have officially submitted samples to ENA. + ### Run tests ```sh @@ -160,19 +164,103 @@ python3 scripts/test_ena_submission.py ### Testing submission locally -ENA-submission currently is only triggered after manual approval. +1. Run loculus locally (need prepro, backend and ena-submission pod), e.g. + +```sh +../deploy.py cluster --dev +../deploy.py helm --dev --enablePreprocessing +../generate_local_test_config.sh +cd ../backend +./start_dev.sh & +cd ../ena-submission +micromamba activate loculus-ena-submission +flyway -user=postgres -password=unsecure -url=jdbc:postgresql://127.0.0.1:5432/loculus -schemas=ena-submission -locations=filesystem:./flyway/sql migrate +``` + +2. Submit data to the backend as test user (create group, submit and approve), e.g. using [example data](https://github.com/pathoplexus/example_data). 
(To test the full submission cycle with INSDC accessions, submit CCHF example data with only 2 segments.)
+
+```sh
+KEYCLOAK_TOKEN_URL="http://localhost:8083/realms/loculus/protocol/openid-connect/token"
+KEYCLOAK_CLIENT_ID="backend-client"
+usernameAndPassword="testuser"
+jwt_keycloak=$(curl -X POST "$KEYCLOAK_TOKEN_URL" --fail-with-body -H 'Content-Type: application/x-www-form-urlencoded' -d "username=$usernameAndPassword&password=$usernameAndPassword&grant_type=password&client_id=$KEYCLOAK_CLIENT_ID")
+JWT=$(echo "$jwt_keycloak" | jq -r '.access_token')
+curl -X 'POST' 'http://localhost:8079/groups' \
+  -H 'accept: application/json' \
+  -H "Authorization: Bearer ${JWT}" \
+  -H 'Content-Type: application/json' \
+  -d '{
+  "groupName": "ENA submission Group",
+  "institution": "University of Loculus",
+  "address": {
+    "line1": "1234 Loculus Street",
+    "line2": "Apt 1",
+    "city": "Dortmund",
+    "state": "NRW",
+    "postalCode": "12345",
+    "country": "Germany"
+  },
+  "contactEmail": "something@loculus.org"}'
+LOCULUS_ACCESSION=$(curl -X 'POST' \
+  'http://localhost:8079/cchf/submit?groupId=1&dataUseTermsType=OPEN' \
+  -H 'accept: application/json' \
+  -H "Authorization: Bearer ${JWT}" \
+  -H 'Content-Type: multipart/form-data' \
+  -F 'metadataFile=@../../example_data/example_files/cchfv_test_metadata.tsv;type=text/tab-separated-values' \
+  -F 'sequenceFile=@../../example_data/example_files/cchfv_test_sequences.fasta' | jq -r '.[0].accession')
+curl -X 'POST' \
+  'http://localhost:8079/cchf/approve-processed-data' \
+  -H 'accept: application/json' \
+  -H "Authorization: Bearer ${JWT}" \
+  -H 'Content-Type: application/json' \
+  -d '{"scope": "ALL"}'
+```
+
+3. Get the list of sequences that are ready to submit to ENA; locally this will write `results/ena_submission_list.json`.
+
+```sh
+snakemake get_ena_submission_list
+```
+
+4. Check the contents, then rename the file to `results/approved_ena_submission_list.json` and trigger ENA submission by adding entries to the submission table:
+
+```sh
+cp results/ena_submission_list.json results/approved_ena_submission_list.json
+snakemake trigger_submission_to_ena_from_file
+```
+
+Alternatively you can upload data to the [test folder](https://github.com/pathoplexus/ena-submission/blob/main/test/approved_ena_submission_list.json) and run `snakemake trigger_submission_to_ena`.
+
+5. Create project, sample and assembly: `snakemake results/project_created results/sample_created results/assembly_created` - you will need the credentials of the ENA test submission account for this. (You can terminate the rules once you see that assembly creation has been successful, or earlier if you see errors.)
+
+6. Note that ENA's dev server does not always finish processing and you might not receive a `gcaAccession` for your dev submissions. If you would like to test the full submission cycle on the ENA dev instance it makes sense to manually alter the gcaAccession in the database to `ERZ24784470` (a known test submission with 2 chromosomes/segments - sadly ERZ accessions are private so I do not have other test examples). You can do this after connecting via pgAdmin or via the CLI:
+
+```sh
+psql -h 127.0.0.1 -p 5432 -U postgres -d loculus
+```
+
+Then perform the update:
+
+```sql
+SET search_path TO "ena-submission";
+UPDATE assembly_table
+SET result = '{"erz_accession": "ERZ24784470", "segment_order": ["L", "M"]}'::jsonb
+WHERE accession = '$LOCULUS_ACCESSION';
+```
+
+Exit `psql` using `\q`.

-The `get_ena_submission_list` runs as a cron-job.
It queries Loculus for new sequences to submit to ENA (these are sequences that are in state OPEN, were not submitted by the INSDC_INGEST_USER, do not include ena external_metadata fields and are not yet in the submission_table of the ena-submission schema). If it finds new sequences it sends a notification to slack with all sequences. +7. Upload to loculus (you can run the webpage locally if you would like to see this visually), `snakemake results/assembly_created results/uploaded_external_metadata`. -It is then the reviewer's turn to review these sequences. [TODO: define review criteria] If these sequences meet our criteria they should be uploaded to [pathoplexus/ena-submission](https://github.com/pathoplexus/ena-submission/blob/main/approved/approved_ena_submission_list.json) (currently we read data from the [test folder](https://github.com/pathoplexus/ena-submission/blob/main/test/approved_ena_submission_list.json) - but this will be changed to the `approved` folder in production). The `trigger_submission_to_ena` rule is constantly checking this folder for new sequences and adding them to the submission_table if they are not already there. Note we cannot yet handle revisions so these should not be added to the approved list [TODO: do not allow submission of revised sequences in `trigger_submission_to_ena`]- revisions will still have to be performed manually. +If you experience issues you can look at the database locally using pgAdmin. On local instances the password is `unsecure`. -If you would like to test `trigger_submission_to_ena` while running locally you can also use the `trigger_submission_to_ena_from_file` rule, this will read in data from `results/approved_ena_submission_list.json` (see the test folder for an example). You can also upload data to the [test folder](https://github.com/pathoplexus/ena-submission/blob/main/test/approved_ena_submission_list.json) - note that if you add fake data with a non-existent group-id the project creation will fail, additionally the `upload_to_loculus` rule will fail if these sequences do not actually exist in your loculus instance. +### Testing submission on a preview instance -All other rules query the `submission_table` for projects/samples and assemblies to submit. Once successful they add accessions to the `results` column in dictionary format. Finally, once the entire process has succeeded the new external metadata will be uploaded to Loculus. +1. Upload data to the [test folder](https://github.com/pathoplexus/ena-submission/blob/main/test/approved_ena_submission_list.json) - note that if you add fake data with a non-existent group-id the project creation will fail, additionally the `upload_to_loculus` rule will fail if these sequences do not actually exist in your loculus instance. -Note that ENA's dev server does not always finish processing and you might not receive a gcaAccession for your dev submissions. If you would like to test the full submission cycle on the ENA dev instance it makes sense to manually alter the gcaAccession in the database using `ERZ24784470`. You can connect to a preview instance via port forwarding to these changes on local database tool such as pgAdmin: +2. Connect to the database of the preview instance via port forwarding using a database tool such as pgAdmin: -1. Apply the preview `~/.kube/config` -2. Find the database POD using `kubectl get pods -A | grep database` -3. Connect via port-forwarding `kubectl port-forward $POD -n $NAMESPACE 5432:5432` -4. 
If necessary find password using `kubectl get secret` +- Apply the preview `~/.kube/config` +- Find the database POD using `kubectl get pods -A | grep database` +- Connect via port-forwarding `kubectl port-forward $POD -n $NAMESPACE 5432:5432` +- If necessary find password using `kubectl get secret` diff --git a/ena-submission/Snakefile b/ena-submission/Snakefile index 26536690e..32ec0d227 100644 --- a/ena-submission/Snakefile +++ b/ena-submission/Snakefile @@ -13,12 +13,31 @@ for key, value in defaults.items(): if not key in config: config[key] = value +LOG_LEVEL = config.get("log_level", "INFO") +SUBMIT_TO_ENA_PROD = config.get("submit_to_ena_prod", False) +SUBMIT_TO_ENA_DEV = not SUBMIT_TO_ENA_PROD + +if SUBMIT_TO_ENA_DEV: + print("Submitting to ENA dev environment") + config["ena_submission_url"] = "https://wwwdev.ebi.ac.uk/ena/submit/drop-box/submit" + config["github_url"] = ( + "https://raw.githubusercontent.com/pathoplexus/ena-submission/main/test/approved_ena_submission_list.json" + ) + config["ena_reports_service_url"] = "https://wwwdev.ebi.ac.uk/ena/submit/report" + +if SUBMIT_TO_ENA_PROD: + print("WARNING: Submitting to ENA production") + config["ena_submission_url"] = "https://www.ebi.ac.uk/ena/submit/drop-box/submit" + config["github_url"] = ( + "https://raw.githubusercontent.com/pathoplexus/ena-submission/main/approved/approved_ena_submission_list.json" + ) + config["ena_reports_service_url"] = "https://www.ebi.ac.uk/ena/submit/report" + + Path("results").mkdir(parents=True, exist_ok=True) with open("results/config.yaml", "w") as f: f.write(yaml.dump(config)) -LOG_LEVEL = config.get("log_level", "INFO") - rule all: input: @@ -88,11 +107,13 @@ rule create_project: project_created=touch("results/project_created"), params: log_level=LOG_LEVEL, + test_flag="--test" if SUBMIT_TO_ENA_DEV else "", shell: """ python {input.script} \ --config-file {input.config} \ --log-level {params.log_level} \ + {params.test_flag} """ @@ -104,11 +125,13 @@ rule create_sample: sample_created=touch("results/sample_created"), params: log_level=LOG_LEVEL, + test_flag="--test" if SUBMIT_TO_ENA_DEV else "", shell: """ python {input.script} \ --config-file {input.config} \ --log-level {params.log_level} \ + {params.test_flag} """ @@ -120,11 +143,13 @@ rule create_assembly: sample_created=touch("results/assembly_created"), params: log_level=LOG_LEVEL, + test_flag="--test" if SUBMIT_TO_ENA_DEV else "", shell: """ python {input.script} \ --config-file {input.config} \ --log-level {params.log_level} \ + {params.test_flag} """ diff --git a/ena-submission/config/defaults.yaml b/ena-submission/config/defaults.yaml index 5c1f9eb1c..3dfa7d682 100644 --- a/ena-submission/config/defaults.yaml +++ b/ena-submission/config/defaults.yaml @@ -6,9 +6,7 @@ db_name: Loculus unique_project_suffix: Loculus ena_submission_username: fake-user ena_submission_password: fake-password -ena_submission_url: https://wwwdev.ebi.ac.uk/ena/submit/drop-box/submit # TODO(https://github.com/loculus-project/loculus/issues/2425): update in production -github_url: https://raw.githubusercontent.com/pathoplexus/ena-submission/main/test/approved_ena_submission_list.json # TODO(https://github.com/loculus-project/loculus/issues/2425): update in production -ena_reports_service_url: https://wwwdev.ebi.ac.uk/ena/submit/report # TODO(https://github.com/loculus-project/loculus/issues/2425): update in production +submit_to_ena_prod: False # TODO(https://github.com/loculus-project/loculus/issues/2425): update in production #ena_checklist: ERC000033 - do 
not use until all fields are mapped to ENA accepted options metadata_mapping: 'subject exposure': diff --git a/ena-submission/scripts/create_assembly.py b/ena-submission/scripts/create_assembly.py index b2d37323e..bb98111f3 100644 --- a/ena-submission/scripts/create_assembly.py +++ b/ena-submission/scripts/create_assembly.py @@ -79,16 +79,17 @@ def create_chromosome_list_object( entries: list[AssemblyChromosomeListFileObject] = [] - if len(unaligned_sequences.keys()) > 1: - for segment_name, item in unaligned_sequences.items(): - if item: # Only list sequenced segments - entry = AssemblyChromosomeListFileObject( - object_name=f"{seq_key["accession"]}.{seq_key["version"]}_{segment_name}", - chromosome_name=segment_name, - chromosome_type=chromosome_type, - ) - entries.append(entry) - else: + segment_order = get_segment_order(unaligned_sequences) + + for segment_name in segment_order: + if segment_name != "main": + entry = AssemblyChromosomeListFileObject( + object_name=f"{seq_key["accession"]}.{seq_key["version"]}_{segment_name}", + chromosome_name=segment_name, + chromosome_type=chromosome_type, + ) + entries.append(entry) + continue entry = AssemblyChromosomeListFileObject( object_name=f"{seq_key["accession"]}.{seq_key["version"]}", chromosome_name="main", @@ -99,6 +100,17 @@ def create_chromosome_list_object( return AssemblyChromosomeListFile(chromosomes=entries) +def get_segment_order(unaligned_sequences) -> list[str]: + segment_order = [] + if len(unaligned_sequences.keys()) > 1: + for segment_name, item in unaligned_sequences.items(): + if item: # Only list sequenced segments + segment_order.append(segment_name) + else: + segment_order.append("main") + return sorted(segment_order) + + def create_manifest_object( config: Config, sample_table_entry: dict[str, str], @@ -108,6 +120,17 @@ def create_manifest_object( group_key: dict[str, str], test=False, ) -> AssemblyManifest: + """ + Create an AssemblyManifest object for an entry in the assembly table using: + - the corresponding ena_sample_accession and bioproject_accession + - the organism metadata from the config file + - sequencing metadata from the corresponding submission table entry + - unaligned nucleotide sequences from the corresponding submission table entry, + these are used to create chromosome files and fasta files which are passed to the manifest. + + If test=True add a timestamp to the alias suffix to allow for multiple submissions of the same + manifest for testing. + """ sample_accession = sample_table_entry["result"]["ena_sample_accession"] study_accession = project_table_entry["result"]["bioproject_accession"] @@ -264,13 +287,18 @@ def submission_table_update(db_config: SimpleConnectionPool): raise RuntimeError(error_msg) -def assembly_table_create(db_config: SimpleConnectionPool, config: Config, retry_number: int = 3): +def assembly_table_create( + db_config: SimpleConnectionPool, config: Config, retry_number: int = 3, test: bool = False +): """ 1. Find all entries in assembly_table in state READY 2. Create temporary files: chromosome_list_file, fasta_file, manifest_file 3. Update assembly_table to state SUBMITTING (only proceed if update succeeds) 4. If (create_ena_assembly succeeds): update state to SUBMITTED with results 3. Else update state to HAS_ERRORS with error messages + + If test=True: add a timestamp to the alias suffix to allow for multiple submissions of the same + manifest for testing AND use the test ENA webin-cli endpoint for submission. 
""" ena_config = get_ena_config( config.ena_submission_username, @@ -321,7 +349,7 @@ def assembly_table_create(db_config: SimpleConnectionPool, config: Config, retry sample_data_in_submission_table[0], seq_key, group_key, - test=True, # TODO(https://github.com/loculus-project/loculus/issues/2425): remove in production + test, ) manifest_file = create_manifest(manifest_object) @@ -340,10 +368,14 @@ def assembly_table_create(db_config: SimpleConnectionPool, config: Config, retry ) continue logger.info(f"Starting assembly creation for accession {row["accession"]}") + segment_order = get_segment_order( + sample_data_in_submission_table[0]["unaligned_nucleotide_sequences"] + ) assembly_creation_results: CreationResults = create_ena_assembly( - ena_config, manifest_file, center_name=center_name + ena_config, manifest_file, center_name=center_name, test=test ) if assembly_creation_results.results: + assembly_creation_results.results["segment_order"] = segment_order update_values = { "status": Status.WAITING, "result": json.dumps(assembly_creation_results.results), @@ -416,7 +448,10 @@ def assembly_table_update( logger.debug("Checking state in ENA") for row in waiting: seq_key = {"accession": row["accession"], "version": row["version"]} - check_results: CreationResults = check_ena(ena_config, row["result"]["erz_accession"]) + segment_order = row["result"]["segment_order"] + check_results: CreationResults = check_ena( + ena_config, row["result"]["erz_accession"], segment_order + ) _last_ena_check = time if not check_results.results: continue @@ -502,7 +537,13 @@ def assembly_table_handle_errors( required=True, type=click.Path(exists=True), ) -def create_assembly(log_level, config_file): +@click.option( + "--test", + is_flag=True, + default=False, + help="Allow multiple submissions of the same project for testing AND use the webin-cli test endpoint", +) +def create_assembly(log_level, config_file, test=False): logger.setLevel(log_level) logging.getLogger("requests").setLevel(logging.INFO) @@ -523,7 +564,7 @@ def create_assembly(log_level, config_file): submission_table_start(db_config) submission_table_update(db_config) - assembly_table_create(db_config, config, retry_number=3) + assembly_table_create(db_config, config, retry_number=3, test=test) assembly_table_update(db_config, config) assembly_table_handle_errors(db_config, config, slack_config) time.sleep(2) diff --git a/ena-submission/scripts/create_project.py b/ena-submission/scripts/create_project.py index 564dbf3f7..b3e617628 100644 --- a/ena-submission/scripts/create_project.py +++ b/ena-submission/scripts/create_project.py @@ -83,7 +83,7 @@ def construct_project_set_object( if test: alias = XmlAttribute( f"{entry["group_id"]}:{entry["organism"]}:{config.unique_project_suffix}:{datetime.now(tz=pytz.utc)}" - ) # TODO(https://github.com/loculus-project/loculus/issues/2425): remove in production + ) else: alias = XmlAttribute( f"{entry["group_id"]}:{entry["organism"]}:{config.unique_project_suffix}" @@ -217,13 +217,18 @@ def submission_table_update(db_config: SimpleConnectionPool): raise RuntimeError(error_msg) -def project_table_create(db_config: SimpleConnectionPool, config: Config, retry_number: int = 3): +def project_table_create( + db_config: SimpleConnectionPool, config: Config, retry_number: int = 3, test: bool = False +): """ 1. Find all entries in project_table in state READY 2. Create project_set: get_group_info from loculus, use entry and config for other fields 3. 
Update project_table to state SUBMITTING (only proceed if update succeeds) 4. If (create_ena_project succeeds): update state to SUBMITTED with results 3. Else update state to HAS_ERRORS with error messages + + If test=True add a timestamp to the alias suffix to allow for multiple submissions of the same + project for testing. """ ena_config = get_ena_config( config.ena_submission_username, @@ -243,9 +248,10 @@ def project_table_create(db_config: SimpleConnectionPool, config: Config, retry_ group_info = get_group_info(config, row["group_id"])[0]["group"] except Exception as e: logger.error(f"Was unable to get group info for group: {row["group_id"]}, {e}") + time.sleep(30) continue - project_set = construct_project_set_object(group_info, config, row, test=True) + project_set = construct_project_set_object(group_info, config, row, test) update_values = { "status": Status.SUBMITTING, "started_at": datetime.now(tz=pytz.utc), @@ -358,7 +364,13 @@ def project_table_handle_errors( required=True, type=click.Path(exists=True), ) -def create_project(log_level, config_file): +@click.option( + "--test", + is_flag=True, + default=False, + help="Allow multiple submissions of the same project for testing", +) +def create_project(log_level, config_file, test=False): logger.setLevel(log_level) logging.getLogger("requests").setLevel(logging.INFO) @@ -379,7 +391,7 @@ def create_project(log_level, config_file): submission_table_start(db_config) submission_table_update(db_config) - project_table_create(db_config, config) + project_table_create(db_config, config, test=test) project_table_handle_errors(db_config, config, slack_config) time.sleep(2) diff --git a/ena-submission/scripts/create_sample.py b/ena-submission/scripts/create_sample.py index 82f9dad5d..dee1b5936 100644 --- a/ena-submission/scripts/create_sample.py +++ b/ena-submission/scripts/create_sample.py @@ -144,7 +144,7 @@ def construct_sample_set_object( if test: alias = XmlAttribute( f"{entry["accession"]}:{organism}:{config.unique_project_suffix}:{datetime.now(tz=pytz.utc)}" - ) # TODO(https://github.com/loculus-project/loculus/issues/2425): remove in production + ) else: alias = XmlAttribute(f"{entry["accession"]}:{organism}:{config.unique_project_suffix}") list_sample_attributes = get_sample_attributes(config, sample_metadata, entry) @@ -268,7 +268,9 @@ def submission_table_update(db_config: SimpleConnectionPool): raise RuntimeError(error_msg) -def sample_table_create(db_config: SimpleConnectionPool, config: Config, retry_number: int = 3): +def sample_table_create( + db_config: SimpleConnectionPool, config: Config, retry_number: int = 3, test: bool = False +): """ 1. Find all entries in sample_table in state READY 2. Create sample_set_object: use metadata, center_name, organism, and ingest fields @@ -276,6 +278,9 @@ def sample_table_create(db_config: SimpleConnectionPool, config: Config, retry_n 3. Update sample_table to state SUBMITTING (only proceed if update succeeds) 4. If (create_ena_sample succeeds): update state to SUBMITTED with results 3. Else update state to HAS_ERRORS with error messages + + If test=True add a timestamp to the alias suffix to allow for multiple submissions of the same + sample for testing. 
""" ena_config = get_ena_config( config.ena_submission_username, @@ -295,10 +300,7 @@ def sample_table_create(db_config: SimpleConnectionPool, config: Config, retry_n ) sample_set = construct_sample_set_object( - config, - sample_data_in_submission_table[0], - row, - test=True, # TODO(https://github.com/loculus-project/loculus/issues/2425): remove in production + config, sample_data_in_submission_table[0], row, test ) update_values = { "status": Status.SUBMITTING, @@ -408,7 +410,13 @@ def sample_table_handle_errors( required=True, type=click.Path(exists=True), ) -def create_sample(log_level, config_file): +@click.option( + "--test", + is_flag=True, + default=False, + help="Allow multiple submissions of the same project for testing", +) +def create_sample(log_level, config_file, test=False): logger.setLevel(log_level) logging.getLogger("requests").setLevel(logging.INFO) @@ -429,7 +437,7 @@ def create_sample(log_level, config_file): submission_table_start(db_config) submission_table_update(db_config) - sample_table_create(db_config, config) + sample_table_create(db_config, config, test=test) sample_table_handle_errors(db_config, config, slack_config) time.sleep(2) diff --git a/ena-submission/scripts/ena_submission_helper.py b/ena-submission/scripts/ena_submission_helper.py index 2f3ff351a..df599e499 100644 --- a/ena-submission/scripts/ena_submission_helper.py +++ b/ena-submission/scripts/ena_submission_helper.py @@ -296,7 +296,7 @@ def create_manifest(manifest: AssemblyManifest) -> str: def post_webin_cli( - config: ENAConfig, manifest_filename, center_name=None + config: ENAConfig, manifest_filename, center_name=None, test=True ) -> subprocess.CompletedProcess: subprocess_args = [ "java", @@ -311,8 +311,8 @@ def post_webin_cli( "-manifest", manifest_filename, "-submit", - "-test", # TODO(https://github.com/loculus-project/loculus/issues/2425): remove in prod ] + subprocess_args.append("-test") if test else None if center_name: subprocess_args.extend(["-centername", center_name]) return subprocess.run( @@ -324,16 +324,17 @@ def post_webin_cli( def create_ena_assembly( - config: ENAConfig, manifest_filename: str, center_name=None + config: ENAConfig, manifest_filename: str, center_name=None, test=True ) -> CreationResults: """ This is equivalent to running: webin-cli -username {params.ena_submission_username} -password {params.ena_submission_password} -context genome -manifest {manifest_file} -submit + test=True, adds the `-test` flag which means submissions will use the ENA dev endpoint. 
""" errors = [] warnings = [] - response = post_webin_cli(config, manifest_filename, center_name=center_name) + response = post_webin_cli(config, manifest_filename, center_name=center_name, test=test) logger.info(response.stdout) if response.returncode != 0: error_message = ( @@ -366,7 +367,7 @@ def create_ena_assembly( return CreationResults(results=assembly_results, errors=errors, warnings=warnings) -def check_ena(config: ENAConfig, erz_accession: str) -> CreationResults: +def check_ena(config: ENAConfig, erz_accession: str, segment_order: list[str]) -> CreationResults: """ This is equivalent to running: curl -X 'GET' \ @@ -378,6 +379,7 @@ def check_ena(config: ENAConfig, erz_accession: str) -> CreationResults: errors = [] warnings = [] + assembly_results = {"segment_order": segment_order} try: response = requests.get( url, @@ -407,11 +409,45 @@ def check_ena(config: ENAConfig, erz_accession: str) -> CreationResults: acc_list = entry["acc"].split(",") acc_dict = {a.split(":")[0]: a.split(":")[-1] for a in acc_list} if "genome" not in acc_dict: + logger.error("Unexpected response format: genome not in acc_dict") raise requests.exceptions.RequestException gca_accession = acc_dict["genome"] if "chromosomes" not in acc_dict: + logger.error("Unexpected response format: chromosome not in acc_dict") raise requests.exceptions.RequestException - insdc_accession = acc_dict["chromosomes"] + insdc_accession_range = acc_dict["chromosomes"] + if len(segment_order) == 1 and len(insdc_accession_range.split("-")) == 0: + assembly_results["insdc_accession"] = insdc_accession_range + else: + start_letters = insdc_accession_range.split("-")[0][:2] + start_digit = 10 ** ( + len(insdc_accession_range.split("-")[0]) - 2 + ) # after letters accession can start with 0 + insdc_accession_start_int = start_digit + int( + insdc_accession_range.split("-")[0][2:] + ) + insdc_accession_end_int = start_digit + int( + insdc_accession_range.split("-")[-1][2:] + ) + if insdc_accession_end_int - insdc_accession_start_int != len(segment_order) - 1: + logger.error( + "Unexpected response format: chromosome does not have expected number of segments" + ) + raise requests.exceptions.RequestException + insdc_accession_base_dict = { + ("insdc_accession_" + segment): ( + start_letters + str(insdc_accession_start_int + i)[1:] + ) + for i, segment in enumerate(segment_order) + } + insdc_accession_full_dict = { + ("insdc_accession_full_" + segment): ( + start_letters + str(insdc_accession_start_int + i)[1:] + ".1" + ) + for i, segment in enumerate(segment_order) + } # set version to 1 by default + assembly_results.update(insdc_accession_base_dict) + assembly_results.update(insdc_accession_full_dict) else: return CreationResults(results=None, errors=errors, warnings=warnings) except: @@ -422,9 +458,10 @@ def check_ena(config: ENAConfig, erz_accession: str) -> CreationResults: logger.warning(error_message) errors.append(error_message) return CreationResults(results=None, errors=errors, warnings=warnings) - assembly_results = { - "erz_accession": erz_accession, - "gca_accession": gca_accession, - "insdc_accession": insdc_accession, - } + assembly_results.update( + { + "erz_accession": erz_accession, + "gca_accession": gca_accession, + } + ) return CreationResults(results=assembly_results, errors=errors, warnings=warnings) diff --git a/ena-submission/scripts/upload_external_metadata_to_loculus.py b/ena-submission/scripts/upload_external_metadata_to_loculus.py index ed10cc8b4..bd81ba87d 100644 --- 
a/ena-submission/scripts/upload_external_metadata_to_loculus.py +++ b/ena-submission/scripts/upload_external_metadata_to_loculus.py @@ -85,6 +85,19 @@ def get_external_metadata(db_config: SimpleConnectionPool, entry: dict[str, Any] data["externalMetadata"]["gcaAccession"] = corresponding_assembly[0]["result"][ "gca_accession" ] + insdc_accession_keys = [ + key + for key in corresponding_assembly[0]["result"] + if key.startswith("insdc_accession_full") + ] + segments = [key[len("insdc_accession_full") :] for key in insdc_accession_keys] + for segment in segments: + data["externalMetadata"]["insdcAccessionBase" + segment] = corresponding_assembly[0][ + "result" + ]["insdc_accession" + segment] + data["externalMetadata"]["insdcAccessionFull" + segment] = corresponding_assembly[0][ + "result" + ]["insdc_accession_full" + segment] else: raise Exception return data From ae3e391debe94ce7bf181b40733d713cf4db5ab8 Mon Sep 17 00:00:00 2001 From: "Anna (Anya) Parker" <50943381+anna-parker@users.noreply.github.com> Date: Tue, 24 Sep 2024 17:38:32 +0200 Subject: [PATCH 18/20] feat(deployment, ena-submission, ci): Update ena-submission config to include submitToEnaProduction flag (#2874) * Adds the field submitToEnaProduction to the values.yaml config (when set to true the ena submission pod will send requests to ena prod) * Updates the deploy script to include the ena-submission.yaml config for easier debugging * Adds github actions that will block the creation of an ena-submission-image with this flag for loculus (preventing previews from sending requests to ena prod, but still allowing previews to send requests to ena dev using the test user credentials we have in sealed secrets). * Adds an additional list of allowed hosts to the defaults.yaml config to prevent edge case where field is changed after preview is live (config would still be updated by reloader and pods would talk to prod even if check fails). --- .github/workflows/ena-submission-image.yaml | 13 ++++++++++ .github/workflows/update-argocd-metadata.yml | 7 +++++ deploy.py | 26 +++++++++++++------ ena-submission/Snakefile | 3 +++ ena-submission/config/defaults.yaml | 4 ++- .../loculus/templates/_common-metadata.tpl | 4 +-- .../templates/ena-submission-config.yaml | 6 +++++ kubernetes/loculus/values.yaml | 3 +++ 8 files changed, 55 insertions(+), 11 deletions(-) diff --git a/.github/workflows/ena-submission-image.yaml b/.github/workflows/ena-submission-image.yaml index 3524f9068..dba5e55bf 100644 --- a/.github/workflows/ena-submission-image.yaml +++ b/.github/workflows/ena-submission-image.yaml @@ -36,6 +36,19 @@ jobs: packages: write checks: read steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Validate submitToEnaProduction is not true in values.yaml + run: | + python -c " + import yaml + with open('kubernetes/loculus/values.yaml', 'r') as file: + values = yaml.safe_load(file) + submit_to_ena_prod = values.get('submitToEnaProduction', False) + if submit_to_ena_prod: + print('Error: The flag submitToEnaProduction is set to true - this will submit data to ENA production. 
Please set it to false in values.yaml') + exit(1) + " - name: Shorten sha run: echo "sha=${sha::7}" >> $GITHUB_ENV - uses: actions/checkout@v4 diff --git a/.github/workflows/update-argocd-metadata.yml b/.github/workflows/update-argocd-metadata.yml index 19f902864..1434639ff 100644 --- a/.github/workflows/update-argocd-metadata.yml +++ b/.github/workflows/update-argocd-metadata.yml @@ -74,6 +74,13 @@ jobs: check-name: Build keycloakify Docker Image repo-token: ${{ secrets.GITHUB_TOKEN }} wait-interval: 2 + - name: Wait for ENA Submission Docker Image + uses: lewagon/wait-on-check-action@v1.3.4 + with: + ref: ${{ github.sha }} + check-name: Build ena-submission Docker Image + repo-token: ${{ secrets.GITHUB_TOKEN }} + wait-interval: 2 # End of wait block - name: Checkout External Repository uses: actions/checkout@v4 diff --git a/deploy.py b/deploy.py index 715a820bf..5dc1fe7a1 100755 --- a/deploy.py +++ b/deploy.py @@ -109,7 +109,6 @@ ) - args = parser.parse_args() @@ -258,7 +257,6 @@ def get_codespace_name(): def generate_configs(from_live, live_host): temp_dir_path = Path(tempfile.mkdtemp()) - print(f"Unprocessed config available in temp dir: {temp_dir_path}") helm_chart = str(HELM_CHART_DIR) @@ -273,7 +271,7 @@ def generate_configs(from_live, live_host): backend_config_path, codespace_name, from_live, - live_host + live_host, ) website_config_path = temp_dir_path / "website_config.json" @@ -283,7 +281,7 @@ def generate_configs(from_live, live_host): website_config_path, codespace_name, from_live, - live_host + live_host, ) runtime_config_path = temp_dir_path / "runtime_config.json" @@ -293,7 +291,19 @@ def generate_configs(from_live, live_host): runtime_config_path, codespace_name, from_live, - live_host + live_host, + ) + + ena_submission_configmap_path = temp_dir_path / "config.yaml" + ena_submission_configout_path = temp_dir_path / "ena-submission-config.yaml" + generate_config( + helm_chart, + "templates/ena-submission-config.yaml", + ena_submission_configmap_path, + codespace_name, + from_live, + live_host, + ena_submission_configout_path, ) ingest_configmap_path = temp_dir_path / "config.yaml" @@ -306,7 +316,7 @@ def generate_configs(from_live, live_host): codespace_name, from_live, live_host, - ingest_configout_path + ingest_configout_path, ) prepro_configmap_path = temp_dir_path / "preprocessing-config.yaml" @@ -319,7 +329,7 @@ def generate_configs(from_live, live_host): codespace_name, from_live, live_host, - prepro_configout_path + prepro_configout_path, ) run_command( @@ -344,7 +354,7 @@ def generate_config( ): if from_live and live_host: number_of_dots = live_host.count(".") - if number_of_dots < 2: # this is an imperfect hack + if number_of_dots < 2: # this is an imperfect hack raise ValueError("Currently only subdomains are supported as live-hosts") # To be able to cope with top level domains we need more logic to use the right subdomain separator - but we should probably avoid this anyway as we shouldn't use production domains helm_template_cmd = [ diff --git a/ena-submission/Snakefile b/ena-submission/Snakefile index 32ec0d227..fa9816d2f 100644 --- a/ena-submission/Snakefile +++ b/ena-submission/Snakefile @@ -15,6 +15,9 @@ for key, value in defaults.items(): LOG_LEVEL = config.get("log_level", "INFO") SUBMIT_TO_ENA_PROD = config.get("submit_to_ena_prod", False) +if config.get("backend_url", "") not in config.get("allowed_submission_hosts", []): + print("WARNING: backend_url not in allowed_hosts") + SUBMIT_TO_ENA_PROD = False SUBMIT_TO_ENA_DEV = not SUBMIT_TO_ENA_PROD if 
SUBMIT_TO_ENA_DEV: diff --git a/ena-submission/config/defaults.yaml b/ena-submission/config/defaults.yaml index 3dfa7d682..b3e2b40f6 100644 --- a/ena-submission/config/defaults.yaml +++ b/ena-submission/config/defaults.yaml @@ -6,7 +6,9 @@ db_name: Loculus unique_project_suffix: Loculus ena_submission_username: fake-user ena_submission_password: fake-password -submit_to_ena_prod: False # TODO(https://github.com/loculus-project/loculus/issues/2425): update in production +submit_to_ena_prod: False +allowed_submission_hosts: + - https://backend.pathoplexus.org #ena_checklist: ERC000033 - do not use until all fields are mapped to ENA accepted options metadata_mapping: 'subject exposure': diff --git a/kubernetes/loculus/templates/_common-metadata.tpl b/kubernetes/loculus/templates/_common-metadata.tpl index cb8979eba..213d7ef5e 100644 --- a/kubernetes/loculus/templates/_common-metadata.tpl +++ b/kubernetes/loculus/templates/_common-metadata.tpl @@ -367,8 +367,8 @@ organisms: organismName: {{ quote .organismName }} externalMetadata: {{- $args := dict "metadata" (include "loculus.patchMetadataSchema" . | fromYaml).metadata "nucleotideSequences" $nucleotideSequences}} - {{ $metadata := include "loculus.generateBackendExternalMetadata" $args | fromYaml }} - {{ $metadata.fields | default list | toYaml | nindent 8 }} + {{- $metadata := include "loculus.generateBackendExternalMetadata" $args | fromYaml }} + {{- $metadata.fields | default list | toYaml | nindent 8 }} {{- end }} {{- end }} {{- end }} diff --git a/kubernetes/loculus/templates/ena-submission-config.yaml b/kubernetes/loculus/templates/ena-submission-config.yaml index 62eecd2ac..32ba43c5e 100644 --- a/kubernetes/loculus/templates/ena-submission-config.yaml +++ b/kubernetes/loculus/templates/ena-submission-config.yaml @@ -1,6 +1,9 @@ {{- $testconfig := .Values.testconfig | default false }} {{- $backendHost := .Values.environment | eq "server" | ternary (printf "https://backend%s%s" .Values.subdomainSeparator $.Values.host) ($testconfig | ternary "http://localhost:8079" "http://loculus-backend-service:8079") }} {{- $keycloakHost := .Values.environment | eq "server" | ternary (printf "https://authentication%s%s" $.Values.subdomainSeparator $.Values.host) ($testconfig | ternary "http://localhost:8083" "http://loculus-keycloak-service:8083") }} +{{- $submitToEnaProduction := .Values.submitToEnaProduction | default false }} +{{- $enaDbName := .Values.enaDbName | default false }} +{{- $enaUniqueSuffix := .Values.enaUniqueSuffix | default false }} --- apiVersion: v1 kind: ConfigMap @@ -8,6 +11,9 @@ metadata: name: loculus-ena-submission-config data: config.yaml: | + submit_to_ena_prod: {{ $submitToEnaProduction }} + db_name: {{ $enaDbName }} + unique_project_suffix: {{ $enaUniqueSuffix }} backend_url: {{ $backendHost }} keycloak_token_url: {{ $keycloakHost -}}/realms/loculus/protocol/openid-connect/token {{- include "loculus.generateENASubmissionConfig" . | nindent 4 }} \ No newline at end of file diff --git a/kubernetes/loculus/values.yaml b/kubernetes/loculus/values.yaml index 679297a5a..84bd1242a 100644 --- a/kubernetes/loculus/values.yaml +++ b/kubernetes/loculus/values.yaml @@ -1517,6 +1517,9 @@ enforceHTTPS: true registrationTermsMessage: > You must agree to the terms of use. 
+submitToEnaProduction: false +enaDbName: Loculus +enaUniqueSuffix: Loculus subdomainSeparator: "-" replicas: website: 1 From a6a0c9fd9e1d8f1fdf87f16329c59775546d12a5 Mon Sep 17 00:00:00 2001 From: "Anna (Anya) Parker" <50943381+anna-parker@users.noreply.github.com> Date: Wed, 25 Sep 2024 14:04:52 +0200 Subject: [PATCH 19/20] feat(ena-submission): Reduce ena submission induced db load (#2875) * Reduce the size of connection pools (for connections to the postgres db) used by each snakemake rule from max4 to max2. * Increase sleep period after each iteration of snakemake rules from 2 to 10seconds. * Increase period between checking github for new data from 1 to 2min. * Improve error handling when requests to ENA fail (I had tests for this but they were wrong - errors on main currently due to incorrect error handling). * Make all polling wait periods customizable via the config --- ena-submission/Snakefile | 16 ++++++++++ ena-submission/config/defaults.yaml | 3 ++ ena-submission/scripts/create_assembly.py | 18 +++++++++-- ena-submission/scripts/create_project.py | 17 +++++++--- ena-submission/scripts/create_sample.py | 9 ++++-- .../scripts/ena_submission_helper.py | 31 ++++++++----------- .../scripts/submission_db_helper.py | 2 +- ena-submission/scripts/test_ena_submission.py | 2 +- .../scripts/trigger_submission_to_ena.py | 11 +++++-- .../upload_external_metadata_to_loculus.py | 9 ++++-- 10 files changed, 85 insertions(+), 33 deletions(-) diff --git a/ena-submission/Snakefile b/ena-submission/Snakefile index fa9816d2f..0c621d1ce 100644 --- a/ena-submission/Snakefile +++ b/ena-submission/Snakefile @@ -14,6 +14,10 @@ for key, value in defaults.items(): config[key] = value LOG_LEVEL = config.get("log_level", "INFO") +TIME_BETWEEN_ITERATIONS = config.get("time_between_iterations", 10) +MIN_BETWEEN_GITHUB_REQUESTS = config.get("min_between_github_requests", 2) +MIN_BETWEEN_ENA_CHECKS = config.get("min_between_ena_checks", 5) + SUBMIT_TO_ENA_PROD = config.get("submit_to_ena_prod", False) if config.get("backend_url", "") not in config.get("allowed_submission_hosts", []): print("WARNING: backend_url not in allowed_hosts") @@ -76,11 +80,13 @@ rule trigger_submission_to_ena: submitted=touch("results/triggered"), params: log_level=LOG_LEVEL, + min_between_github_requests=MIN_BETWEEN_GITHUB_REQUESTS, shell: """ python {input.script} \ --config-file {input.config} \ --log-level {params.log_level} \ + --min-between-github-requests {params.min_between_github_requests} """ @@ -111,11 +117,13 @@ rule create_project: params: log_level=LOG_LEVEL, test_flag="--test" if SUBMIT_TO_ENA_DEV else "", + time_between_iterations=TIME_BETWEEN_ITERATIONS, shell: """ python {input.script} \ --config-file {input.config} \ --log-level {params.log_level} \ + --time-between-iterations {params.time_between_iterations} \ {params.test_flag} """ @@ -129,11 +137,13 @@ rule create_sample: params: log_level=LOG_LEVEL, test_flag="--test" if SUBMIT_TO_ENA_DEV else "", + time_between_iterations=TIME_BETWEEN_ITERATIONS, shell: """ python {input.script} \ --config-file {input.config} \ --log-level {params.log_level} \ + --time-between-iterations {params.time_between_iterations} \ {params.test_flag} """ @@ -147,11 +157,15 @@ rule create_assembly: params: log_level=LOG_LEVEL, test_flag="--test" if SUBMIT_TO_ENA_DEV else "", + time_between_iterations=TIME_BETWEEN_ITERATIONS, + min_between_ena_checks=MIN_BETWEEN_ENA_CHECKS, shell: """ python {input.script} \ --config-file {input.config} \ --log-level {params.log_level} \ + 
--time-between-iterations {params.time_between_iterations} \ + --min-between-ena-checks {params.min_between_ena_checks} \ {params.test_flag} """ @@ -164,9 +178,11 @@ rule upload_to_loculus: sample_created=touch("results/uploaded_external_metadata"), params: log_level=LOG_LEVEL, + time_between_iterations=TIME_BETWEEN_ITERATIONS, shell: """ python {input.script} \ --config-file {input.config} \ --log-level {params.log_level} \ + --time-between-iterations {params.time_between_iterations} \ """ diff --git a/ena-submission/config/defaults.yaml b/ena-submission/config/defaults.yaml index b3e2b40f6..34df8e64d 100644 --- a/ena-submission/config/defaults.yaml +++ b/ena-submission/config/defaults.yaml @@ -9,6 +9,9 @@ ena_submission_password: fake-password submit_to_ena_prod: False allowed_submission_hosts: - https://backend.pathoplexus.org +time_between_iterations: 10 +min_between_github_requests: 2 +min_between_ena_checks: 5 #ena_checklist: ERC000033 - do not use until all fields are mapped to ENA accepted options metadata_mapping: 'subject exposure': diff --git a/ena-submission/scripts/create_assembly.py b/ena-submission/scripts/create_assembly.py index bb98111f3..7b4ec80d5 100644 --- a/ena-submission/scripts/create_assembly.py +++ b/ena-submission/scripts/create_assembly.py @@ -543,7 +543,19 @@ def assembly_table_handle_errors( default=False, help="Allow multiple submissions of the same project for testing AND use the webin-cli test endpoint", ) -def create_assembly(log_level, config_file, test=False): +@click.option( + "--time-between-iterations", + default=10, + type=int, +) +@click.option( + "--min-between-ena-checks", + default=5, + type=int, +) +def create_assembly( + log_level, config_file, test=False, time_between_iterations=10, min_between_ena_checks=5 +): logger.setLevel(log_level) logging.getLogger("requests").setLevel(logging.INFO) @@ -565,9 +577,9 @@ def create_assembly(log_level, config_file, test=False): submission_table_update(db_config) assembly_table_create(db_config, config, retry_number=3, test=test) - assembly_table_update(db_config, config) + assembly_table_update(db_config, config, time_threshold=min_between_ena_checks) assembly_table_handle_errors(db_config, config, slack_config) - time.sleep(2) + time.sleep(time_between_iterations) if __name__ == "__main__": diff --git a/ena-submission/scripts/create_project.py b/ena-submission/scripts/create_project.py index b3e617628..96c7c50c6 100644 --- a/ena-submission/scripts/create_project.py +++ b/ena-submission/scripts/create_project.py @@ -272,7 +272,9 @@ def project_table_create( ) ) continue - logger.info(f"Starting Project creation for group_id {row["group_id"]}") + logger.info( + f"Starting Project creation for group_id {row["group_id"]} organism {row["organism"]}" + ) project_creation_results: CreationResults = create_ena_project(ena_config, project_set) if project_creation_results.results: update_values = { @@ -296,7 +298,9 @@ def project_table_create( ) tries += 1 if number_rows_updated == 1: - logger.info(f"Project creation for group_id {row["group_id"]} succeeded!") + logger.info( + f"Project creation for group_id {row["group_id"]} organism {row["organism"]} succeeded!" 
+            )
         else:
             update_values = {
                 "status": Status.HAS_ERRORS,
@@ -370,7 +374,12 @@ def project_table_handle_errors(
     default=False,
     help="Allow multiple submissions of the same project for testing",
 )
-def create_project(log_level, config_file, test=False):
+@click.option(
+    "--time-between-iterations",
+    default=10,
+    type=int,
+)
+def create_project(log_level, config_file, test=False, time_between_iterations=10):
     logger.setLevel(log_level)
     logging.getLogger("requests").setLevel(logging.INFO)

@@ -393,7 +402,7 @@ def create_project(log_level, config_file, test=False):
         project_table_create(db_config, config, test=test)
         project_table_handle_errors(db_config, config, slack_config)

-        time.sleep(2)
+        time.sleep(time_between_iterations)


 if __name__ == "__main__":
diff --git a/ena-submission/scripts/create_sample.py b/ena-submission/scripts/create_sample.py
index dee1b5936..e1aee5f15 100644
--- a/ena-submission/scripts/create_sample.py
+++ b/ena-submission/scripts/create_sample.py
@@ -416,7 +416,12 @@ def sample_table_handle_errors(
     default=False,
     help="Allow multiple submissions of the same project for testing",
 )
-def create_sample(log_level, config_file, test=False):
+@click.option(
+    "--time-between-iterations",
+    default=10,
+    type=int,
+)
+def create_sample(log_level, config_file, test=False, time_between_iterations=10):
     logger.setLevel(log_level)
     logging.getLogger("requests").setLevel(logging.INFO)

@@ -439,7 +444,7 @@ def create_sample(log_level, config_file, test=False):
         sample_table_create(db_config, config, test=test)
         sample_table_handle_errors(db_config, config, slack_config)

-        time.sleep(2)
+        time.sleep(time_between_iterations)


 if __name__ == "__main__":
diff --git a/ena-submission/scripts/ena_submission_helper.py b/ena-submission/scripts/ena_submission_helper.py
index df599e499..eeccf42a5 100644
--- a/ena-submission/scripts/ena_submission_helper.py
+++ b/ena-submission/scripts/ena_submission_helper.py
@@ -128,13 +128,10 @@ def get_project_xml(project_set):
         }

     xml = get_project_xml(project_set)
-    try:
-        response = post_webin(config, xml)
-        response.raise_for_status()
-    except requests.exceptions.RequestException as e:
+    response = post_webin(config, xml)
+    if not response.ok:
         error_message = (
-            f"Request failed with status:{response.status_code}. Message: {e}. "
-            f"Response: {response.text}."
+            f"Request failed with status:{response.status_code}. " f"Response: {response.text}."
         )
         logger.warning(error_message)
         errors.append(error_message)
@@ -181,10 +178,8 @@ def get_sample_xml(sample_set):
         return files

     xml = get_sample_xml(sample_set)
-    try:
-        response = post_webin(config, xml)
-        response.raise_for_status()
-    except requests.exceptions.RequestException:
+    response = post_webin(config, xml)
+    if not response.ok:
         error_message = (
             f"Request failed with status:{response.status_code}. "
             f"Request: {response.request}, Response: {response.text}"
@@ -380,14 +375,14 @@ def check_ena(config: ENAConfig, erz_accession: str, segment_order: list[str]) -
     errors = []
     warnings = []
     assembly_results = {"segment_order": segment_order}
-    try:
-        response = requests.get(
-            url,
-            auth=HTTPBasicAuth(config.ena_submission_username, config.ena_submission_password),
-            timeout=10,  # wait a full 10 seconds for a response incase slow
-        )
-        response.raise_for_status()
-    except requests.exceptions.RequestException:
+
+    response = requests.get(
+        url,
+        auth=HTTPBasicAuth(config.ena_submission_username, config.ena_submission_password),
+        timeout=10,  # wait a full 10 seconds for a response incase slow
+    )
+    response.raise_for_status()
+    if not response.ok:
         error_message = (
             f"ENA check failed with status:{response.status_code}. "
             f"Request: {response.request}, Response: {response.text}"
diff --git a/ena-submission/scripts/submission_db_helper.py b/ena-submission/scripts/submission_db_helper.py
index 4b9c4138b..f93cbf38c 100644
--- a/ena-submission/scripts/submission_db_helper.py
+++ b/ena-submission/scripts/submission_db_helper.py
@@ -26,7 +26,7 @@ def db_init(

     return SimpleConnectionPool(
         minconn=1,
-        maxconn=4,  # max 7*4 connections to db allowed
+        maxconn=2,  # max 7*2 connections to db allowed
         dbname="loculus",
         user=db_username,
         host=db_host,
diff --git a/ena-submission/scripts/test_ena_submission.py b/ena-submission/scripts/test_ena_submission.py
index d70dda7d5..70667ae4e 100644
--- a/ena-submission/scripts/test_ena_submission.py
+++ b/ena-submission/scripts/test_ena_submission.py
@@ -93,6 +93,7 @@ def mock_requests_post(status_code, text):
     mock_response = mock.Mock()
     mock_response.status_code = status_code
     mock_response.text = text
+    mock_response.ok = mock_response.status_code < 400
     return mock_response


@@ -122,7 +123,6 @@ def test_create_project_xml_failure(self, mock_post):
     def test_create_project_server_failure(self, mock_post):
         # Testing project creation failure
         mock_post.return_value = mock_requests_post(500, "Internal Server Error")
-        mock_post.return_value.raise_for_status.side_effect = exceptions.RequestException()
         project_set = default_project_type()
         response = create_ena_project(test_ena_config, project_set)
         error_message_part = "Request failed with status:500"
diff --git a/ena-submission/scripts/trigger_submission_to_ena.py b/ena-submission/scripts/trigger_submission_to_ena.py
index c9bc568ba..081ce7bfc 100644
--- a/ena-submission/scripts/trigger_submission_to_ena.py
+++ b/ena-submission/scripts/trigger_submission_to_ena.py
@@ -77,7 +77,14 @@ def upload_sequences(db_config: SimpleConnectionPool, sequences_to_upload: dict[
     required=False,
     type=click.Path(),
 )
-def trigger_submission_to_ena(log_level, config_file, input_file=None):
+@click.option(
+    "--min-between-github-requests",
+    default=2,
+    type=int,
+)
+def trigger_submission_to_ena(
+    log_level, config_file, input_file=None, min_between_github_requests=2
+):
     logger.setLevel(log_level)
     logging.getLogger("requests").setLevel(logging.INFO)

@@ -109,7 +116,7 @@ def trigger_submission_to_ena(log_level, config_file, input_file=None):
                 error_msg = f"Failed to retrieve file: {response.status_code}"
                 logger.error(error_msg)
         upload_sequences(db_config, sequences_to_upload)
-        time.sleep(60)  # Sleep for 1min to not overwhelm github
+        time.sleep(min_between_github_requests * 60)  # Sleep for x min to not overwhelm github


 if __name__ == "__main__":
diff --git a/ena-submission/scripts/upload_external_metadata_to_loculus.py b/ena-submission/scripts/upload_external_metadata_to_loculus.py
index bd81ba87d..8e6178d2e 100644
--- a/ena-submission/scripts/upload_external_metadata_to_loculus.py
+++ b/ena-submission/scripts/upload_external_metadata_to_loculus.py
@@ -208,7 +208,12 @@ def upload_handle_errors(
     required=True,
     type=click.Path(exists=True),
 )
-def upload_external_metadata(log_level, config_file):
+@click.option(
+    "--time-between-iterations",
+    default=10,
+    type=int,
+)
+def upload_external_metadata(log_level, config_file, time_between_iterations=10):
     logger.setLevel(log_level)
     logging.getLogger("requests").setLevel(logging.INFO)

@@ -231,7 +236,7 @@ def upload_external_metadata(log_level, config_file):
             config,
             slack_config,
         )
-        time.sleep(2)
+        time.sleep(time_between_iterations)


 if __name__ == "__main__":

From ec5ce2491e5fcad1033f9367995306b39c484cb2 Mon Sep 17 00:00:00 2001
From: "Anna (Anya) Parker" <50943381+anna-parker@users.noreply.github.com>
Date: Thu, 26 Sep 2024 14:45:45 +0200
Subject: [PATCH 20/20] fix(ingest): Update metadata hash calculation, do not
 include empty fields in hash (#2877)

---
 ingest/README.md                   | 2 ++
 ingest/scripts/group_segments.py   | 7 ++++++-
 ingest/scripts/prepare_metadata.py | 7 ++++++-
 3 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/ingest/README.md b/ingest/README.md
index d85a6451f..6669f8ad0 100644
--- a/ingest/README.md
+++ b/ingest/README.md
@@ -42,6 +42,8 @@ Every sequence entry is to be uploaded only once and must be ignored by future p

 To achieve this, an md5 hash is generated for each sequence entry based on the post-transform metadata and sequence content. The hash is based on all metadata fields submitted to Loculus as well as the sequence. Hence, changes to the ingest pipeline's transform step (above) can lead to changes in hash and resubmission - even without underlying data change on INSDC. Likewise, some changes to the INSDC data might not cause a sequence update on Loculus if what has been changed does not affect the post-transformed metadata.

+To allow for addition and removal of metadata fields without a version bump across all samples we only take a hash of fields with a value. For example the hash of a sample where the field "is_lab_host" is empty is equal to the hash of that same sample without the "is_lab_host" field.
+
 For segmented viruses we calculate the md5 hash of each segment and then, after grouping segments we concatenate the hashes of each segment before again hashing the hashes.

 ### Grouping segmented viruses
diff --git a/ingest/scripts/group_segments.py b/ingest/scripts/group_segments.py
index 2e129556a..2b766d473 100644
--- a/ingest/scripts/group_segments.py
+++ b/ingest/scripts/group_segments.py
@@ -232,8 +232,13 @@ def main(

         row["submissionId"] = joint_key

+        # Hash of all metadata fields should be the same if
+        # 1. field is not in keys_to_keep and
+        # 2. field is in keys_to_keep but is "" or None
+        filtered_record = {k: str(v) for k, v in row.items() if v is not None and str(v)}
+
         row["hash"] = hashlib.md5(
-            json.dumps(row, sort_keys=True).encode(), usedforsecurity=False
+            json.dumps(filtered_record, sort_keys=True).encode(), usedforsecurity=False
         ).hexdigest()

         metadata[joint_key] = row
diff --git a/ingest/scripts/prepare_metadata.py b/ingest/scripts/prepare_metadata.py
index 18c971a84..b440a22eb 100644
--- a/ingest/scripts/prepare_metadata.py
+++ b/ingest/scripts/prepare_metadata.py
@@ -138,7 +138,12 @@ def main(
             msg = f"No hash found for {record[config.fasta_id_field]}"
             raise ValueError(msg)

-        metadata_dump = json.dumps(record, sort_keys=True)
+        # Hash of all metadata fields should be the same if
+        # 1. field is not in keys_to_keep and
+        # 2. field is in keys_to_keep but is "" or None
+        filtered_record = {k: str(v) for k, v in record.items() if v is not None and str(v)}
+
+        metadata_dump = json.dumps(filtered_record, sort_keys=True)
         prehash = metadata_dump + sequence_hash

         record["hash"] = hashlib.md5(prehash.encode(), usedforsecurity=False).hexdigest()
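
For illustration only, a minimal standalone sketch of the empty-field-filtered hashing that the README change above describes; it is not taken from the patch, and the field names used here (e.g. "is_lab_host", "strain") are hypothetical examples:

import hashlib
import json


def metadata_hash(record: dict) -> str:
    # Drop fields whose value is None or an empty string, so adding or removing
    # an always-empty metadata column does not change the resulting hash.
    filtered = {k: str(v) for k, v in record.items() if v is not None and str(v)}
    return hashlib.md5(
        json.dumps(filtered, sort_keys=True).encode(), usedforsecurity=False
    ).hexdigest()


# A record with an empty field hashes the same as the record without that field.
with_empty_field = {"strain": "A/example/2024", "is_lab_host": ""}
without_field = {"strain": "A/example/2024"}
assert metadata_hash(with_empty_field) == metadata_hash(without_field)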