diff --git a/acceptance_tests/common/setup.sh b/acceptance_tests/common/setup.sh
index 49439b61..06b28136 100644
--- a/acceptance_tests/common/setup.sh
+++ b/acceptance_tests/common/setup.sh
@@ -4,6 +4,7 @@ mkdir -p workdir/hpans
mkdir -p workdir/ade-errors
mkdir -p workdir/output/pending
mkdir -p workdir/logs
+mkdir -p workdir/reports
git clone --depth 1 https://github.com/pagopa/cstar-cli.git
cd cstar-cli || exit
diff --git a/acceptance_tests/file_report/file_report.bash b/acceptance_tests/file_report/file_report.bash
new file mode 100755
index 00000000..b7c773b4
--- /dev/null
+++ b/acceptance_tests/file_report/file_report.bash
@@ -0,0 +1,98 @@
+#!/bin/bash
+
+if [ $# -lt 1 ] ; then
+ echo "Illegal number of parameters (1 mandatory, was $#)" >&2
+ echo "usage: bash script_splitting.bash env [timeout_in_minutes]" >&2
+ exit 2
+fi
+
+if [ "$1" != "uat" ] && [ "$1" != "dev" ]
+then
+ echo "only dev and uat available for this test!"
+ exit 2
+fi
+
+ENV=$1
+# timeout default 5minutes
+TIMEOUT_IN_MINUTES="${2:-5}"
+
+sh ../common/setup.sh
+
+N_AGGREGATES=8
+
+generate_input_file() {
+ cd cstar-cli || exit
+ echo "Generating input file..."
+ poetry run cst sender aggregates --sender 12345 --action trx_and_aggr --aggr-qty $N_AGGREGATES --avg-trx 3
+ cd ..
+ FILENAME=$(basename cstar-cli/generated/*.csv)
+ echo "Generated: $FILENAME"
+ cp cstar-cli/generated/"$FILENAME" ./workdir/input/"$FILENAME"
+}
+run_batch_service() {
+ java -jar ../common/rtd-ms-transaction-filter.jar
+}
+get_file_sent_occurrence_in_report() {
+ OUTPUT_FILENAME=$(basename workdir/output/ADE.*)
+ FILE_REPORT_NAME=$(ls -v workdir/reports | tail -n 1)
+ FILE_SENT_OCCURRANCE_IN_REPORT=$(grep -c "$OUTPUT_FILENAME" < workdir/reports/"$FILE_REPORT_NAME")
+}
+
+### file generation
+generate_input_file
+
+### batch service configuration
+# shellcheck source=../common/setenv_env.sh
+source ../common/setenv_"$ENV".sh
+source setenv.sh
+
+echo "Executing batch service..."
+### batch service run
+run_batch_service
+
+#### ASSERTIONS
+
+# the file sent is not in the report
+get_file_sent_occurrence_in_report
+if [ "$FILE_SENT_OCCURRANCE_IN_REPORT" != 0 ]
+then
+ echo "File sent has been found in report but it was not supposed to: [FAILED]"
+ exit 2
+fi
+
+# check if file has been uploaded
+N_UPLOADS=$(grep -c "uploaded with success (status was: 201)" < workdir/logs/application.log)
+if [ "$N_UPLOADS" -ne 1 ]
+then
+ echo "Upload test not passed, $N_UPLOADS files uploaded: [FAILED]"
+ exit 2
+else
+ echo "Files uploaded with success: [SUCCESS]"
+fi
+
+SLEEP_INTERVAL_IN_SECONDS=10
+
+#set batch service send to false in order to not send the placeholder files
+export ACQ_BATCH_TRX_SENDER_ADE_ENABLED=false
+for (( i=0 ; i <= TIMEOUT_IN_MINUTES * 60 / SLEEP_INTERVAL_IN_SECONDS; i++))
+do
+ echo "Waiting $SLEEP_INTERVAL_IN_SECONDS seconds..."
+ sleep $SLEEP_INTERVAL_IN_SECONDS
+
+ # run batch service with dummy file
+ cp cstar-cli/generated/"$FILENAME" ./workdir/input/"$FILENAME"
+ run_batch_service
+
+ # check if file sent in the previous run has been received inside the report
+ get_file_sent_occurrence_in_report
+
+ # if report does contain the file sent the exit loop
+ if [ "$FILE_SENT_OCCURRANCE_IN_REPORT" -gt 0 ]
+ then
+ echo "file found in report: [SUCCESS]"
+ break
+ fi
+
+done
+
+exit 0
diff --git a/acceptance_tests/file_report/file_report_more_sender_codes.bash b/acceptance_tests/file_report/file_report_more_sender_codes.bash
new file mode 100755
index 00000000..e3fb6a81
--- /dev/null
+++ b/acceptance_tests/file_report/file_report_more_sender_codes.bash
@@ -0,0 +1,117 @@
+#!/bin/bash
+
+if [ $# -lt 1 ] ; then
+ echo "Illegal number of parameters (1 mandatory, was $#)" >&2
+ echo "usage: bash script_splitting.bash env [timeout_in_minutes] [sender_code_1] [sender_code_2]" >&2
+ exit 2
+fi
+
+if [ "$1" != "uat" ] && [ "$1" != "dev" ]
+then
+ echo "only dev and uat available for this test!"
+ exit 2
+fi
+
+
+ENV=$1
+# timeout default 5minutes
+TIMEOUT_IN_MINUTES="${2:-5}"
+
+# Set sender codes
+SENDER_CODE1="${3-12345}"
+SENDER_CODE2="${4-54321}"
+
+
+N_AGGREGATES=8
+
+REPORT_DIR=workdir/reports
+
+sh ../common/setup.sh
+
+echo "Make sure to have $SENDER_CODE1 and $SENDER_CODE2 associated with your API key"
+echo "Make sure to have empty cstar-cli/generated folder"
+
+generate_input_file() {
+ cd cstar-cli || exit
+ echo "Generating input file..."
+ poetry run cst sender aggregates --sender $SENDER_CODE1 --action trx_and_aggr --aggr-qty $N_AGGREGATES --avg-trx 3
+ poetry run cst sender aggregates --sender $SENDER_CODE2 --action trx_and_aggr --aggr-qty $N_AGGREGATES --avg-trx 3
+ cd ..
+ FILENAME=$(ls -1 cstar-cli/generated/*.csv | xargs -I {} basename {})
+ echo "Generated:"
+ echo "$FILENAME"
+ cp cstar-cli/generated/*.csv ./workdir/input/
+}
+run_batch_service() {
+ java -jar ../common/rtd-ms-transaction-filter.jar
+}
+get_file_sent_occurrence_in_report() {
+ OUTPUT_FILENAME=$(ls workdir/output | grep -E "ADE.*.pgp")
+ FILE_REPORT_NAME=$(ls -v "$REPORT_DIR" | tail -n 1)
+ FILES_SENT_OCCURRENCES_IN_REPORT=$(grep -c "$OUTPUT_FILENAME" < "$REPORT_DIR"/"$FILE_REPORT_NAME")
+}
+check_two_files_sent() {
+ # check if the files has been uploaded
+ N_UPLOADS=$(grep -c "uploaded with success (status was: 201)" < workdir/logs/application.log)
+ if [ "$N_UPLOADS" -ne 2 ]
+ then
+ echo "Upload test not passed, $N_UPLOADS files uploaded: [FAILED]"
+ exit 2
+ else
+ echo "Files uploaded with success: [SUCCESS]"
+ fi
+}
+
+### file generation
+generate_input_file
+
+### batch service configuration
+# shellcheck source=../common/setenv_env.sh
+source ../common/setenv_"$ENV".sh
+source setenv.sh
+
+# Disable batch scheduling for processing both files independently
+export ACQ_BATCH_SCHEDULED=false
+
+# Explicitly set the report directory
+export ACQ_BATCH_FILE_REPORT_PATH="$REPORT_DIR"
+
+# Batch service run for both files
+echo "Executing batch service..."
+run_batch_service
+run_batch_service
+
+#### ASSERTIONS
+check_two_files_sent
+
+SLEEP_INTERVAL_IN_SECONDS=10
+
+# Set batch service send to false in order to not send the placeholder files
+export ACQ_BATCH_TRX_SENDER_ADE_ENABLED=false
+
+# Wait for the report to be returned containing the two files
+for (( i=0 ; i <= TIMEOUT_IN_MINUTES * 60 / SLEEP_INTERVAL_IN_SECONDS; i++))
+do
+ echo "Waiting $SLEEP_INTERVAL_IN_SECONDS seconds..."
+ sleep $SLEEP_INTERVAL_IN_SECONDS
+
+ # run batch service with dummy file
+ cp cstar-cli/generated/*.csv ./workdir/input/
+ run_batch_service
+
+ # check if file sent in the previous run has been received inside the report
+ get_file_sent_occurrence_in_report
+
+ # if report does contain the files sent the exit loop
+ if [ "$FILES_SENT_OCCURRENCES_IN_REPORT" -eq 2 ]
+ then
+ echo "Files found in report: [SUCCESS]"
+ break
+ fi
+
+done
+
+rm -rf cstar-cli
+rm -rf workdir
+
+exit 0
diff --git a/acceptance_tests/file_report/file_report_without_folder.sh b/acceptance_tests/file_report/file_report_without_folder.sh
new file mode 100755
index 00000000..b2f9c714
--- /dev/null
+++ b/acceptance_tests/file_report/file_report_without_folder.sh
@@ -0,0 +1,81 @@
+#!/bin/bash
+
+if [ $# -lt 1 ] ; then
+ echo "Illegal number of parameters (1 mandatory, was $#)" >&2
+ echo "usage: bash script_splitting.bash env" >&2
+ exit 2
+fi
+
+if [ "$1" != "uat" ] && [ "$1" != "dev" ]
+then
+ echo "only dev and uat available for this test!"
+ exit 2
+fi
+
+ENV=$1
+
+N_AGGREGATES=8
+
+REPORT_DIR=workdir/reports
+
+sh ../common/setup.sh
+
+rm -rf "$REPORT_DIR"
+
+generate_input_file() {
+ cd cstar-cli || exit
+ echo "Generating input file..."
+ poetry run cst sender aggregates --sender 12345 --action trx_and_aggr --aggr-qty $N_AGGREGATES --avg-trx 3
+ cd ..
+ FILENAME=$(basename cstar-cli/generated/*.csv)
+ echo "Generated: $FILENAME"
+ cp cstar-cli/generated/"$FILENAME" ./workdir/input/"$FILENAME"
+}
+run_batch_service() {
+ java -jar ../common/rtd-ms-transaction-filter.jar
+}
+get_file_sent_occurrence_in_report() {
+ OUTPUT_FILENAME=$(ls workdir/output | grep -E "ADE.*.pgp")
+ FILE_REPORT_NAME=$(ls -v "$REPORT_DIR" | tail -n 1)
+ FILE_SENT_OCCURRENCE_IN_REPORT=$(grep -c "$OUTPUT_FILENAME" < "$REPORT_DIR"/"$FILE_REPORT_NAME")
+ }
+
+### file generation
+generate_input_file
+
+### batch service configuration
+# shellcheck source=../common/setenv_env.sh
+source ../common/setenv_"$ENV".sh
+source setenv.sh
+
+# force reports directory to be workdir/reports
+export ACQ_BATCH_FILE_REPORT_PATH="$REPORT_DIR"
+export ACQ_BATCH_TRX_SENDER_ADE_ENABLED=false
+
+echo "Executing batch service..."
+### batch service run
+run_batch_service
+#### ASSERTIONS
+
+# expected result: job went fine and report directory has been created
+if [ -d "$ACQ_BATCH_FILE_REPORT_PATH" ]
+then
+ echo "Directory $REPORT_DIR exists: [SUCCESS]"
+else
+ echo "Directory $REPORT_DIR does not exist: [FAILED]"
+ exit 2
+fi
+
+# one report is downloaded
+if [ "$(ls -1 "$REPORT_DIR" | wc -l)" -eq 1 ]
+then
+ echo "Directory $REPORT_DIR is not empty: [SUCCESS]"
+else
+ echo "Directory $REPORT_DIR is empty: [FAILED]"
+ exit 2
+fi
+
+rm -rf cstar-cli
+rm -rf workdir
+
+exit 0
diff --git a/acceptance_tests/file_report/setenv.sh b/acceptance_tests/file_report/setenv.sh
new file mode 100644
index 00000000..563479ce
--- /dev/null
+++ b/acceptance_tests/file_report/setenv.sh
@@ -0,0 +1,6 @@
+export ACQ_BATCH_TRX_SENDER_RTD_ENABLED=false
+export ACQ_BATCH_TRX_SENDER_ADE_ENABLED=true
+
+export ACQ_BATCH_DELETE_OUTPUT_FILE=KEEP
+export ACQ_BATCH_WRITER_ADE_SPLIT_THRESHOLD=10
+export ACQ_BATCH_INPUT_CHUNK_SIZE=10
diff --git a/acceptance_tests/filename_uniqueness/fail_upload_file_name_not_unique.bash b/acceptance_tests/filename_uniqueness/fail_upload_file_name_not_unique.bash
new file mode 100755
index 00000000..78588811
--- /dev/null
+++ b/acceptance_tests/filename_uniqueness/fail_upload_file_name_not_unique.bash
@@ -0,0 +1,75 @@
+#!/bin/bash
+
+if [ $# -ne 1 ]; then
+ echo "Illegal number of parameters (1 mandatory, was $#)" >&2
+ echo "usage: bash script_splitting.bash env" >&2
+ exit 2
+fi
+
+ENV=$1
+
+N_AGGREGATES=10
+
+RETRY_MAX_ATTEMPTS=3
+
+sh ../common/setup.sh
+
+generate_input_file() {
+ cd cstar-cli || exit
+ echo "Generating input file..."
+ poetry run cst sender aggregates --sender 12345 --action trx_and_aggr --aggr-qty $N_AGGREGATES --avg-trx 3
+ cd ..
+ FILENAME=$(basename cstar-cli/generated/*.csv)
+ echo "Generated: $FILENAME"
+ cp cstar-cli/generated/"$FILENAME" ./workdir/input/"$FILENAME"
+}
+run_batch_service() {
+ echo "Executing batch service..."
+ java -jar ../common/rtd-ms-transaction-filter.jar
+}
+check_one_file_sent() {
+ # check if the first file has been uploaded
+ N_UPLOADS=$(grep -c "uploaded with success (status was: 201)"
rtd-ms-transaction-filter-api
it.gov.pagopa.rtd.ms.transaction_filter.api
- 1.4.1
+ 1.5.0
rtd-ms-transaction-filter-api-batch
- 1.4.1
+ 1.5.0
diff --git a/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatch.java b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatch.java
index d21c1e54..b78a9a9c 100644
--- a/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatch.java
+++ b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatch.java
@@ -15,6 +15,7 @@
import it.gov.pagopa.rtd.transaction_filter.batch.step.tasklet.SelectTargetInputFileTasklet;
import it.gov.pagopa.rtd.transaction_filter.batch.step.tasklet.SenderAdeAckFilesRecoveryTasklet;
import it.gov.pagopa.rtd.transaction_filter.connector.AbiToFiscalCodeRestClient;
+import it.gov.pagopa.rtd.transaction_filter.connector.FileReportRestClient;
import it.gov.pagopa.rtd.transaction_filter.connector.SenderAdeAckRestClient;
import it.gov.pagopa.rtd.transaction_filter.service.HpanConnectorService;
import it.gov.pagopa.rtd.transaction_filter.service.StoreService;
@@ -29,10 +30,13 @@
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.Step;
+import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.job.builder.FlowJobBuilder;
+import org.springframework.batch.core.job.flow.FlowExecutionStatus;
+import org.springframework.batch.core.job.flow.JobExecutionDecider;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.launch.support.SimpleJobLauncher;
import org.springframework.batch.core.repository.JobRepository;
@@ -57,7 +61,6 @@
*
*
*/
-
@Configuration
@Data
@@ -75,6 +78,7 @@ public class TransactionFilterBatch {
private final HpanConnectorService hpanConnectorService;
private final AbiToFiscalCodeRestClient abiToFiscalCodeRestClient;
private final SenderAdeAckRestClient senderAdeAckRestClient;
+ private final FileReportRestClient fileReportRestClient;
private final TransactionWriterService transactionWriterService;
private final StoreService storeService;
@@ -118,14 +122,16 @@ public class TransactionFilterBatch {
private String senderAdeAckFilesDirectoryPath;
@Value("${batchConfiguration.TransactionFilterBatch.transactionFilter.transactionLogsPath}")
private String logsDirectoryPath;
+ @Value("${batchConfiguration.TransactionFilterBatch.fileReportRecovery.enabled}")
+ private Boolean fileReportRecoveryEnabled;
+ @Value("${batchConfiguration.TransactionFilterBatch.transactionSenderPending.enabled}")
+ private Boolean sendPendingFilesStepEnabled;
+ private static final String FALSE = Boolean.FALSE.toString();
+ private static final String TRUE = Boolean.TRUE.toString();
private DataSource dataSource;
PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
- public void closeChannels() {
- transactionWriterService.closeAll();
- }
-
public void clearStoreService() {
storeService.clearAll();
}
@@ -244,20 +250,20 @@ public void setDataSource(DataSource dataSource) {
}
/**
- * This method builds a flow which can be decomposed in the following
+ * This method builds a flow which can be decomposed in the following
* steps:
*
- * - Attempts panlist recovery, if enabled. In case of a failure in the
+ *
- Attempts panlist recovery, if enabled. In case of a failure in the
* execution, the process is stopped.
- * - Attempts salt recovery, if enabled. In case of a failure in the
- * execution, the process is stopped. Otherwise, the panList step is
+ *
- Attempts salt recovery, if enabled. In case of a failure in the
+ * execution, the process is stopped. Otherwise, the panList step is
* executed
- * - The panList step is executed, to store the .csv files including the
- * list of active pans. If the step fails, the file archival tasklet is
+ *
- The panList step is executed, to store the .csv files including the
+ * list of active pans. If the step fails, the file archival tasklet is
* called, otherwise the transaction filter step is called.
- * - The transaction filter step checks the records with the stored pans,
- * writing the matching records in the output file. If the process fails,
- * the file management tasklet is called, otherwise the transaction sender
+ *
- The transaction filter step checks the records with the stored pans,
+ * writing the matching records in the output file. If the process fails,
+ * the file management tasklet is called, otherwise the transaction sender
* step si called.
* - Attempts sending the output files through an REST channel, if
* enabled. The file management tasklet is always called, after the step
@@ -275,6 +281,11 @@ public FlowJobBuilder transactionJobBuilder() {
.listener(jobListener())
.start(pagopaPublicKeyRecoveryTask(this.storeService))
.on(FAILED).end()
+ .on("*").to(fileReportStepDecider(fileReportRecoveryEnabled))
+ .on(TRUE).to(transactionFilterStep.fileReportRecoveryStep(fileReportRestClient))
+ .from(fileReportStepDecider(fileReportRecoveryEnabled))
+ .on(FALSE).to(selectTargetInputFileTask(this.storeService))
+ .from(transactionFilterStep.fileReportRecoveryStep(fileReportRestClient))
.on("*").to(selectTargetInputFileTask(this.storeService))
.on(FAILED).end()
.on("*").to(preventReprocessingFilenameAlreadySeenTask(this.storeService, this.transactionWriterService))
@@ -319,7 +330,11 @@ public FlowJobBuilder transactionJobBuilder() {
.on(FAILED).to(fileManagementTask())
.from(transactionFilterStep.transactionSenderRtdMasterStep(this.hpanConnectorService))
.on("*").to(senderAdeAckFilesRecoveryTask())
- .on("*").to(transactionFilterStep.transactionSenderPendingMasterStep(this.hpanConnectorService))
+ .next(pendingStepDecider(sendPendingFilesStepEnabled))
+ .on(TRUE).to(transactionFilterStep.transactionSenderPendingMasterStep(this.hpanConnectorService))
+ .from(pendingStepDecider(sendPendingFilesStepEnabled))
+ .on(FALSE).to(fileManagementTask())
+ .from(transactionFilterStep.transactionSenderPendingMasterStep(this.hpanConnectorService))
.on("*").to(fileManagementTask())
.build();
}
@@ -329,6 +344,26 @@ public JobListener jobListener() {
return new JobListener();
}
+ /**
+ * Returns a Decider utilized to alter the job flow based on a condition. Status returned is "TRUE"
+ * if the boolean parameter is true, "FALSE" otherwise.
+ * @param enabled boolean value
+ * @return a job execution decider
+ */
+ @Bean
+ public JobExecutionDecider fileReportStepDecider(Boolean enabled) {
+ return (JobExecution jobExecution, StepExecution stepExecution) -> decider(enabled);
+ }
+
+ @Bean
+ public JobExecutionDecider pendingStepDecider(Boolean enabled) {
+ return (JobExecution jobExecution, StepExecution stepExecution) -> decider(enabled);
+ }
+
+ FlowExecutionStatus decider(Boolean enabled) {
+ return Boolean.TRUE.equals(enabled) ? new FlowExecutionStatus(TRUE) : new FlowExecutionStatus(FALSE);
+ }
+
@Bean
public Step hpanListRecoveryTask() {
HpanListRecoveryTasklet hpanListRecoveryTasklet = new HpanListRecoveryTasklet();
@@ -428,6 +463,7 @@ public Step fileManagementTask() {
fileManagementTasklet.setTransactionWriterService(transactionWriterService);
fileManagementTasklet.setSuccessPath(successArchivePath);
fileManagementTasklet.setUploadPendingPath(pendingArchivePath);
+ fileManagementTasklet.setErrorPath(errorArchivePath);
fileManagementTasklet.setHpanDirectory(panReaderStep.getHpanDirectoryPath());
fileManagementTasklet.setOutputDirectory(transactionFilterStep.getOutputDirectoryPath());
fileManagementTasklet.setDeleteProcessedFiles(deleteProcessedFiles);
diff --git a/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/model/DeleteOutputFilesEnum.java b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/model/DeleteOutputFilesEnum.java
new file mode 100644
index 00000000..727e78c4
--- /dev/null
+++ b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/model/DeleteOutputFilesEnum.java
@@ -0,0 +1,5 @@
+package it.gov.pagopa.rtd.transaction_filter.batch.model;
+
+public enum DeleteOutputFilesEnum {
+ ALWAYS, ERROR, KEEP
+}
\ No newline at end of file
diff --git a/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/TransactionFilterStep.java b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/TransactionFilterStep.java
index addf9338..50007345 100644
--- a/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/TransactionFilterStep.java
+++ b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/TransactionFilterStep.java
@@ -13,14 +13,17 @@
import it.gov.pagopa.rtd.transaction_filter.batch.step.processor.TransactionAggregationReaderProcessor;
import it.gov.pagopa.rtd.transaction_filter.batch.step.processor.TransactionAggregationWriterProcessor;
import it.gov.pagopa.rtd.transaction_filter.batch.step.reader.CustomIteratorItemReader;
+import it.gov.pagopa.rtd.transaction_filter.batch.step.reader.FileReportItemReader;
import it.gov.pagopa.rtd.transaction_filter.batch.step.reader.TransactionFlatFileItemReader;
import it.gov.pagopa.rtd.transaction_filter.batch.step.tasklet.PGPEncrypterTasklet;
import it.gov.pagopa.rtd.transaction_filter.batch.step.tasklet.TransactionChecksumTasklet;
import it.gov.pagopa.rtd.transaction_filter.batch.step.tasklet.TransactionSenderRestTasklet;
import it.gov.pagopa.rtd.transaction_filter.batch.step.writer.ChecksumHeaderWriter;
import it.gov.pagopa.rtd.transaction_filter.batch.step.writer.PGPFlatFileItemWriter;
+import it.gov.pagopa.rtd.transaction_filter.connector.FileReportRestClient;
import it.gov.pagopa.rtd.transaction_filter.connector.HpanRestClient;
import it.gov.pagopa.rtd.transaction_filter.connector.HpanRestClient.SasScope;
+import it.gov.pagopa.rtd.transaction_filter.connector.model.FileMetadata;
import it.gov.pagopa.rtd.transaction_filter.service.HpanConnectorService;
import it.gov.pagopa.rtd.transaction_filter.service.StoreService;
import it.gov.pagopa.rtd.transaction_filter.service.TransactionWriterService;
@@ -54,6 +57,7 @@
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.LineMapper;
import org.springframework.batch.item.file.MultiResourceItemWriter;
+import org.springframework.batch.item.file.builder.FlatFileItemWriterBuilder;
import org.springframework.batch.item.file.builder.MultiResourceItemWriterBuilder;
import org.springframework.batch.item.file.mapping.FieldSetMapper;
import org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor;
@@ -66,6 +70,7 @@
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.context.annotation.PropertySource;
+import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.UrlResource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
@@ -133,6 +138,10 @@ public class TransactionFilterStep {
private Boolean transactionSenderPendingEnabled;
@Value("${batchConfiguration.TransactionFilterBatch.transactionWriterAde.splitThreshold}")
private int adeSplitThreshold;
+ @Value("${batchConfiguration.TransactionFilterBatch.fileReportRecovery.directoryPath}")
+ private String fileReportDirectory;
+ @Value("${batchConfiguration.TransactionFilterBatch.fileReportRecovery.fileNamePrefix}")
+ private String fileReportPrefixName;
public static final String RTD_OUTPUT_FILE_PREFIX = "CSTAR.";
public static final String ADE_OUTPUT_FILE_PREFIX = "ADE.";
@@ -150,7 +159,10 @@ public class TransactionFilterStep {
"senderCode", "operationType", "circuitType", "pan", "trxDate", "idTrxAcquirer",
"idTrxIssuer", "correlationId", "amount", "amountCurrency", "acquirerId", "merchantId",
"terminalId", "bin", "mcc", "fiscalCode", "vat", "posType", "par"};
+ private static final String[] REPORT_CSV_FIELDS = new String[]{
+ "name", "status", "size", "transmissionDate"};
private static final String CSV_DELIMITER = ";";
+ private static final String DATE_FORMAT_FOR_FILENAME = "yyyyMMddHHmmssSSS";
private final BatchConfig batchConfig;
private final StepBuilderFactory stepBuilderFactory;
@@ -241,6 +253,20 @@ public LineAggregator adeTransactionsAggregateLineAggr
return delimitedLineAggregator;
}
+ /**
+ * Composes CSV lines from file report model.
+ * @return a line aggregator
+ */
+ @Bean
+ public LineAggregator fileReportLineAggregator() {
+ BeanWrapperFieldExtractor extractor = new BeanWrapperFieldExtractor<>();
+ extractor.setNames(REPORT_CSV_FIELDS);
+ DelimitedLineAggregator delimitedLineAggregator = new DelimitedLineAggregator<>();
+ delimitedLineAggregator.setDelimiter(CSV_DELIMITER);
+ delimitedLineAggregator.setFieldExtractor(extractor);
+ return delimitedLineAggregator;
+ }
+
/**
* Builds an ItemWriter for filtered transactions. Implements encryption of the output file via PGP.
*
@@ -344,7 +370,7 @@ public Partitioner outputAdeFilesPartitioner(StoreService storeService) {
MultiResourcePartitioner partitioner = new MultiResourcePartitioner();
// do not match every file in output directory but only the ones generated from the input file
String outputFileRegex = getAdeFilesRegex(storeService.getTargetInputFile());
- String pathMatcher = outputDirectoryPath + File.separator + outputFileRegex;
+ String pathMatcher = resolver.getResource(outputDirectoryPath).getURI() + File.separator + outputFileRegex;
partitioner.setResources(resolver.getResources(pathMatcher));
partitioner.partition(partitionerSize);
return partitioner;
@@ -392,6 +418,68 @@ public PGPEncrypterTasklet getPGPEncrypterTasklet(
return pgpEncrypterTasklet;
}
+ /**
+ * Step that retrieve and write on file a file report
+ * @param restClient file report rest client
+ * @return a step
+ */
+ @Bean
+ public Step fileReportRecoveryStep(FileReportRestClient restClient) {
+ return stepBuilderFactory.get("file-report-recovery-step")
+ .chunk(chunkSize)
+ .reader(fileReportReader(restClient))
+ .writer(fileReportWriter())
+ .faultTolerant()
+ .build();
+ }
+
+ /**
+ * ItemReader that retrieve a file report JSON from a rest client and converts it to FileMetadata
+ * @param restClient file report rest client
+ * @return a itemReader
+ */
+ @Bean
+ public ItemReader fileReportReader(FileReportRestClient restClient) {
+ return new FileReportItemReader(restClient);
+ }
+
+ /**
+ * ItemWriter that save on file the file report. It implements a headerCallback with the field names
+ * and a line aggregator to convert the pojo into a CSV file with ";" as delimiter.
+ * @return a itemWriter
+ */
+ @SneakyThrows
+ @Bean
+ public ItemWriter fileReportWriter() {
+ DateTimeFormatter fmt = DateTimeFormatter.ofPattern(DATE_FORMAT_FOR_FILENAME);
+ String currentDate = OffsetDateTime.now().format(fmt);
+
+ Resource outputResource = new FileSystemResource(resolver.getResource(
+ getPathToResolve(fileReportDirectory)).getFile().getAbsolutePath()
+ .concat("/")
+ .concat(fileReportPrefixName)
+ .concat("-")
+ .concat(currentDate)
+ .concat(".csv"));
+
+ return new FlatFileItemWriterBuilder()
+ .name("file-report-item-writer")
+ .resource(outputResource)
+ .headerCallback(writer -> writer.write(String.join(CSV_DELIMITER, REPORT_CSV_FIELDS)))
+ .lineAggregator(fileReportLineAggregator())
+ .build();
+ }
+
+ /**
+ * Convert a path adding the prefix "file:" if it does not contain "classpath:" already. For test purpose.
+ * @param directory
+ * @return
+ */
+ private String getPathToResolve(String directory) {
+ return directory.startsWith("classpath:") ? directory
+ : "file:".concat(directory);
+ }
+
/**
* Builds a dummy ItemWriter to use during transaction aggregation.
* Since we're reading from a chunk-oriented ItemReader and aggregating data in-memory
@@ -479,7 +567,7 @@ public Step transactionFilterMasterStep(StoreService storeService, TransactionWr
*/
@Bean
public Step transactionFilterWorkerStep(StoreService storeService, TransactionWriterService transactionWriterService) {
- DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyyMMddHHmmssSSS");
+ DateTimeFormatter fmt = DateTimeFormatter.ofPattern(DATE_FORMAT_FOR_FILENAME);
String executionDate = OffsetDateTime.now().format(fmt);
return stepBuilderFactory.get("transaction-filter-worker-step")
.chunk(chunkSize)
@@ -528,7 +616,7 @@ public Step transactionAggregationReaderMasterStep(StoreService storeService, Tr
*/
@Bean
public Step transactionAggregationReaderWorkerStep(StoreService storeService, TransactionWriterService transactionWriterService) {
- DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyyMMddHHmmssSSS");
+ DateTimeFormatter fmt = DateTimeFormatter.ofPattern(DATE_FORMAT_FOR_FILENAME);
String executionDate = OffsetDateTime.now().format(fmt);
return stepBuilderFactory.get("transaction-aggregation-reader-worker-step")
.chunk(chunkSize)
@@ -692,7 +780,8 @@ public TransactionItemProcessListener transactionAdeItemProcessListener(
@JobScope
public Partitioner transactionSenderRtdPartitioner() throws IOException {
MultiResourcePartitioner partitioner = new MultiResourcePartitioner();
- String pathMatcher = outputDirectoryPath + File.separator + RTD_OUTPUT_FILE_PREFIX + REGEX_PGP_FILES;
+ String pathMatcher = resolver.getResource(outputDirectoryPath).getURI() + File.separator +
+ RTD_OUTPUT_FILE_PREFIX + REGEX_PGP_FILES;
partitioner.setResources(resolver.getResources(pathMatcher));
partitioner.partition(partitionerSize);
return partitioner;
@@ -710,7 +799,8 @@ public Partitioner transactionSenderAdePartitioner(StoreService storeService) th
MultiResourcePartitioner partitioner = new MultiResourcePartitioner();
String fileNameWithoutExtension = storeService.getTargetInputFile().replace(".csv", "");
String outputFilePrefix = fileNameWithoutExtension.replace("CSTAR", "ADE");
- String pathMatcher = outputDirectoryPath + File.separator + outputFilePrefix + REGEX_PGP_FILES;
+ String pathMatcher = resolver.getResource(outputDirectoryPath).getURI() + File.separator +
+ outputFilePrefix + REGEX_PGP_FILES;
partitioner.setResources(resolver.getResources(pathMatcher));
partitioner.partition(partitionerSize);
return partitioner;
diff --git a/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/processor/TransactionAggregationWriterProcessor.java b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/processor/TransactionAggregationWriterProcessor.java
index ad2ab3ba..a8bc8595 100644
--- a/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/processor/TransactionAggregationWriterProcessor.java
+++ b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/processor/TransactionAggregationWriterProcessor.java
@@ -35,7 +35,7 @@ public AdeTransactionsAggregate process(AggregationKey key) {
aggregate.setTransmissionDate(transmissionDate);
aggregate.setAccountingDate(key.getAccountingDate().getDate());
aggregate.setNumTrx(storeService.getAggregate(key).getNumTrx());
- aggregate.setTotalAmount((long) storeService.getAggregate(key).getTotalAmount());
+ aggregate.setTotalAmount(storeService.getAggregate(key).getTotalAmount());
aggregate.setCurrency("978");
aggregate.setAcquirerId(key.getAcquirerId().getId());
aggregate.setMerchantId(key.getMerchantId());
diff --git a/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/reader/CustomIteratorItemReader.java b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/reader/CustomIteratorItemReader.java
index 56ed0068..d74c1bfa 100644
--- a/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/reader/CustomIteratorItemReader.java
+++ b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/reader/CustomIteratorItemReader.java
@@ -1,11 +1,11 @@
package it.gov.pagopa.rtd.transaction_filter.batch.step.reader;
import java.util.Iterator;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.annotation.BeforeStep;
+import lombok.Setter;
import org.springframework.batch.item.ItemReader;
import org.springframework.lang.Nullable;
+@Setter
public class CustomIteratorItemReader implements ItemReader {
private Iterable iterable;
private Iterator iterator;
@@ -14,11 +14,6 @@ public CustomIteratorItemReader(Iterable iterable) {
this.iterable = iterable;
}
- @BeforeStep
- public void initializeState(StepExecution stepExecution) {
- this.iterator = null;
- }
-
@Nullable
public synchronized T read() {
if (this.iterator == null) {
diff --git a/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/reader/FileReportItemReader.java b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/reader/FileReportItemReader.java
new file mode 100644
index 00000000..027fd0ef
--- /dev/null
+++ b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/reader/FileReportItemReader.java
@@ -0,0 +1,28 @@
+package it.gov.pagopa.rtd.transaction_filter.batch.step.reader;
+
+import it.gov.pagopa.rtd.transaction_filter.connector.FileReportRestClient;
+import it.gov.pagopa.rtd.transaction_filter.connector.model.FileMetadata;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Objects;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.annotation.BeforeStep;
+
+public class FileReportItemReader extends CustomIteratorItemReader {
+
+ private final FileReportRestClient restClient;
+
+ public FileReportItemReader(FileReportRestClient restClient) {
+ super(Collections.emptyList());
+ this.restClient = restClient;
+ }
+
+ @BeforeStep
+ public void initializeState(StepExecution stepExecution) {
+ Collection files = Objects.requireNonNull(restClient.getFileReport())
+ .getFilesRecentlyUploaded();
+ if (files != null) {
+ super.setIterable(files);
+ }
+ }
+}
diff --git a/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/tasklet/FileManagementTasklet.java b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/tasklet/FileManagementTasklet.java
index 87f83745..b4077f61 100644
--- a/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/tasklet/FileManagementTasklet.java
+++ b/api/batch/src/main/java/it/gov/pagopa/rtd/transaction_filter/batch/step/tasklet/FileManagementTasklet.java
@@ -1,8 +1,19 @@
package it.gov.pagopa.rtd.transaction_filter.batch.step.tasklet;
+import it.gov.pagopa.rtd.transaction_filter.batch.model.DeleteOutputFilesEnum;
import it.gov.pagopa.rtd.transaction_filter.service.TransactionWriterService;
-import java.security.SecureRandom;
+import java.io.File;
+import java.io.IOException;
+import java.time.OffsetDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import java.util.UUID;
+import java.util.stream.Collectors;
import lombok.Data;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@@ -19,16 +30,6 @@
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.util.Assert;
-import java.io.File;
-import java.io.IOException;
-import java.time.OffsetDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.stream.Collectors;
-
/**
* implementation of the {@link Tasklet}, in which the execute method contains the logic for processed file archival,
* based on the status of conclusion for every processed file
@@ -43,6 +44,7 @@ public class FileManagementTasklet implements Tasklet, InitializingBean {
private String deleteOutputFiles;
private String manageHpanOnSuccess;
private String successPath;
+ private String errorPath;
private String uploadPendingPath;
private String hpanDirectory;
private String outputDirectory;
@@ -50,16 +52,17 @@ public class FileManagementTasklet implements Tasklet, InitializingBean {
PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
- /**
- *
- * @throws Exception
- */
+ private static final String PGP_REGEX = "*.pgp";
+ private static final String FILE_PROTOCOL = "file:";
+
@Override
public void afterPropertiesSet() throws Exception {
String assertionMessage = "directory must be set";
- Assert.notNull(resolver.getResources("file:" + successPath + "*.pgp"),
+ Assert.notNull(resolver.getResources(FILE_PROTOCOL + successPath + PGP_REGEX),
+ assertionMessage);
+ Assert.notNull(resolver.getResources(FILE_PROTOCOL + errorPath + PGP_REGEX),
assertionMessage);
- Assert.notNull(resolver.getResources("file:" + uploadPendingPath + "*.pgp"),
+ Assert.notNull(resolver.getResources(FILE_PROTOCOL + uploadPendingPath + PGP_REGEX),
assertionMessage);
Assert.notNull(resolver.getResources(hpanDirectory),
assertionMessage);
@@ -68,13 +71,22 @@ public void afterPropertiesSet() throws Exception {
/**
* Method that contains the logic for file archival, based on the exit status of each step obtained from the
* ChunkContext that contains a filename key in the {@link ExecutionContext}
- * @param stepContribution
- * @param chunkContext
+ *
+ * recap of every step with relative filename evaluated:
+ * step_name - file
+ * transaction-checksum-worker-step - input file
+ * transaction-aggregation-reader-worker-step - input file
+ * transaction-aggregation-writer-worker-step - input file
+ * encrypt-aggregate-chunks-worker-step - output csv file ade
+ * transaction-sender-ade-worker-step - output pgp file ade
+ * hpan-recovery-worker-step - hpan file
+ * transaction-filter-worker-step - input file
+ * transaction-sender-rtd-worker-step - output pgp file rtd
+ *
* @return Status of the tasklet execution
- * @throws Exception
*/
@Override
- public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext) throws Exception {
+ public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext) {
closeAllFileChannels();
@@ -82,111 +94,47 @@ public RepeatStatus execute(StepContribution stepContribution, ChunkContext chun
List errorFilenames = new ArrayList<>();
hpanDirectory = makePathSystemIndependent(hpanDirectory);
- List hpanResources = getHpanFiles();
-
Collection stepExecutions = chunkContext.getStepContext().getStepExecution().getJobExecution()
- .getStepExecutions();
+ .getStepExecutions();
- // Since more steps can process the same input file we must keep track
- // of already processed files to avoid trying to archive/delete twice
- // the same one (and thus failing the second time).
- List alreadyProcessedFiles = new ArrayList<>();
+ // map to keep track of the worst status associated to a filename among all steps with the same filename
+ Map filenameWithStatusMap = new HashMap<>();
for (StepExecution stepExecution : stepExecutions) {
if (stepExecution.getExecutionContext().containsKey("fileName")) {
String file = stepExecution.getExecutionContext().getString("fileName");
- if(alreadyProcessedFiles.contains(file)) {
- log.info("Already managed file: {}", file);
- continue;
- } else {
- alreadyProcessedFiles.add(file);
- }
-
- String path = null;
-
- try {
- path = resolver.getResource(file).getFile().getAbsolutePath();
- } catch (Exception e) {
- log.error(e.getMessage(),e);
- path = file.replace("file:/", "");
- }
-
- try {
- boolean isComplete = BatchStatus.COMPLETED.equals(stepExecution.getStatus()) &&
- stepExecution.getFailureExceptions().isEmpty();
- executionWithErrors = executionWithErrors || !isComplete;
- if (!isComplete) {
- String[] filename = makePathSystemIndependent(file).split("/");
- ArrayList filePartsArray = new ArrayList<>(Arrays.asList(
- filename[filename.length - 1].split("\\.")));
- if (filePartsArray.size() == 1) {
- errorFilenames.add(filePartsArray.get(0));
- } else {
- filePartsArray.remove(filePartsArray.size()-1);
- String[] fileParts = new String[0];
- fileParts = filePartsArray.toArray(fileParts);
- errorFilenames.add(String.join(".", fileParts));
- }
- }
-
- boolean isHpanFile = hpanResources.contains(makePathSystemIndependent(path));
- boolean isOutputFile = path.contains(getOutputDirectoryAbsolutePath());
- if (Boolean.TRUE.equals(deleteProcessedFiles) || (isComplete && isHpanFile && manageHpanOnSuccess.equals("DELETE"))) {
- log.info("Removing processed file: {}", file);
- FileUtils.forceDelete(FileUtils.getFile(path));
- } else if (!isOutputFile && (!isHpanFile || !isComplete || manageHpanOnSuccess.equals("ARCHIVE"))) {
- log.info("Archiving processed file: {}", file);
- archiveFile(file, path, isComplete);
- }
- } catch (Exception e) {
- log.error(e.getMessage(), e);
+ boolean isComplete = BatchStatus.COMPLETED.equals(stepExecution.getStatus()) &&
+ stepExecution.getFailureExceptions().isEmpty();
+ executionWithErrors = executionWithErrors || !isComplete;
+ // errorFilenames is populated with the filename of steps that went in error.
+ // this list will be compared with the files in the output directory to delete those which matches
+ if (!isComplete) {
+ errorFilenames.add(getFilenameWithoutExtension(file));
}
+ filenameWithStatusMap.merge(file, stepExecution.getStatus(),
+ (oldValue, newValue) -> BatchStatus.FAILED.equals(oldValue) ? oldValue : newValue);
}
}
- if ("ALWAYS".equals(deleteOutputFiles) || ("ERROR".equals(deleteOutputFiles) && executionWithErrors)) {
- List outputDirectoryResources =
- Arrays.asList(resolver.getResources(makePathSystemIndependent(outputDirectory) + "/*"));
- outputDirectoryResources.forEach(outputDirectoryResource ->
- {
- if (deleteOutputFiles.equals("ALWAYS") || (errorFilenames.stream().anyMatch(
- errorFilename -> {
- try {
- return outputDirectoryResource.getFile().getAbsolutePath().contains(errorFilename);
- } catch (IOException e) {
- log.error(e.getMessage(),e);
- return false;
- }
- }))
- ) {
- try {
- log.info("Deleting output file: {}", outputDirectoryResource.getFile());
- FileUtils.forceDelete(outputDirectoryResource.getFile());
- } catch (IOException e) {
- log.error(e.getMessage(), e);
- }
- }
- });
- }
+ // evaluate only the worst case status among steps with the same filename (e.g. checksum and transaction processing steps)
+ manageFilesBasedOnFilenameAndStatus(filenameWithStatusMap);
+
+ // this code removes only the RTD files because the input filename matches the output filename (without extensions)
+ // in order to maintain backward compatibility with the RTD files, this code will stay until the splitting on RTD is implemented
+ deleteOutputFilesRtdBasedOnFlags(executionWithErrors, errorFilenames);
deleteEmptyLogFiles();
return RepeatStatus.FINISHED;
}
- @SneakyThrows
- private String getOutputDirectoryAbsolutePath() {
- return Arrays.stream(resolver.getResources(outputDirectory)).map(resource -> {
- try {
- return makePathSystemIndependent(resource.getFile().getAbsolutePath());
- } catch (IOException e) {
- log.error(e.getMessage(), e);
- return "";
- }
- }).findAny().orElse(null);
+ private void closeAllFileChannels() {
+ if (transactionWriterService != null) {
+ transactionWriterService.closeAll();
+ }
}
@SneakyThrows
@@ -201,42 +149,175 @@ private List getHpanFiles() {
}).collect(Collectors.toList());
}
- String makePathSystemIndependent(String path) {
+ private String makePathSystemIndependent(String path) {
return path.replace("\\", "/");
}
- private void closeAllFileChannels() {
- if (transactionWriterService != null) {
- transactionWriterService.closeAll();
+ private void manageFilesBasedOnFilenameAndStatus(Map filenameWithStatusMap) {
+ List hpanResources = getHpanFiles();
+ filenameWithStatusMap.forEach((file, status) -> {
+ String absolutePath = getAbsolutePathFromFile(file);
+ boolean isComplete = BatchStatus.COMPLETED.equals(status);
+
+ // output file
+ boolean isOutputFile = isFileInsideOutputDirectory(absolutePath);
+ if (isOutputFile) {
+ manageOutputFile(absolutePath, isComplete);
+ return;
+ // note: csv output files are intentionally not handled here; they are either deleted or left in the output folder
+ }
+
+ // pending file
+ boolean isPendingFile = isFileInsidePendingDirectory(absolutePath);
+ if (isPendingFile) {
+ managePendingFiles(absolutePath, isComplete);
+ return;
+ }
+
+ boolean isHpanFile = hpanResources.contains(makePathSystemIndependent(absolutePath));
+ if (Boolean.TRUE.equals(deleteProcessedFiles)) {
+ deleteFile(new File(absolutePath));
+ } else if (isHpanFile) {
+ manageHpanFiles(file, absolutePath, isComplete);
+ } else {
+ // handle input file archive
+ archiveFile(file, absolutePath, isComplete);
+ }
+ });
+ }
+
+ private String getAbsolutePathFromFile(String file) {
+ String path;
+
+ try {
+ path = resolver.getResource(file).getFile().getAbsolutePath();
+ } catch (IOException e) {
+ log.warn("file {} not found", file);
+ path = file.replace("file:/", "");
}
+ return path;
+ }
+
+ private boolean isFileInsideOutputDirectory(String absolutePath) {
+ String pathWithoutFile = absolutePath.substring(0, absolutePath.lastIndexOf("/"));
+ return pathWithoutFile.equals(getAbsolutePathFromFile(outputDirectory));
+ }
+
+ private void manageOutputFile(String path, boolean isComplete) {
+ // move every pgp file that failed into pending folder
+ if (isOutputFileToMoveToPending(path, isComplete)) {
+ moveToPendingDirectory(path);
+ } else if (isOutputFileToDelete(isComplete)) {
+ deleteFile(FileUtils.getFile(path));
+ }
+ }
+
+ private boolean isFileInsidePendingDirectory(String absolutePath) {
+ String pathWithoutFile = absolutePath.substring(0, absolutePath.lastIndexOf("/"));
+ return pathWithoutFile.equals(getAbsolutePathFromFile(uploadPendingPath));
+ }
+
+ private void managePendingFiles(String absolutePath, boolean isComplete) {
+ // delete the pending files if they have been sent, otherwise leave them in directory pending
+ if (isComplete) {
+ deleteFile(new File(absolutePath));
+ }
+ }
+
+ private boolean isOutputFileToMoveToPending(String path, boolean isComplete) {
+ return path.endsWith(".pgp") && !isComplete;
}
@SneakyThrows
- private void archiveFile(String file, String path, boolean isCompleted) {
- File destinationFile = getDestionationFileByStatus(file, isCompleted);
+ private void moveToPendingDirectory(String path) {
+ log.info("Moving to pending directory {}", path);
+ String archivalPath = resolver.getResources(uploadPendingPath)[0].getFile().getAbsolutePath();
+ File destinationFile = FileUtils.getFile(archivalPath + File.separator + getFilenameFromPath(path));
FileUtils.moveFile(FileUtils.getFile(path), destinationFile);
}
+ private boolean isOutputFileToDelete(boolean isComplete) {
+ return DeleteOutputFilesEnum.ALWAYS.name().equals(deleteOutputFiles) ||
+ (DeleteOutputFilesEnum.ERROR.name().equals(deleteOutputFiles) && !isComplete) ||
+ Boolean.TRUE.equals(deleteProcessedFiles);
+ }
+
+ @SneakyThrows
+ private void deleteFile(File file) {
+ log.info("Deleting file: {}", file);
+ FileUtils.deleteQuietly(file);
+ }
+
@SneakyThrows
- private File getDestionationFileByStatus(String sourceFilePath, boolean isCompleted) {
+ private void deleteOutputFilesRtdBasedOnFlags(boolean executionWithErrors, List errorFilenames) {
+ if ("ALWAYS".equals(deleteOutputFiles) || ("ERROR".equals(deleteOutputFiles) && executionWithErrors)) {
+ Arrays.stream(resolver.getResources(makePathSystemIndependent(outputDirectory) + "/*"))
+ .map(this::getFileFromResource)
+ .filter(File::isFile)
+ .filter(outputFilename -> "ALWAYS".equals(deleteOutputFiles) || errorFilenames.contains(getFilenameWithoutExtension(outputFilename)))
+ .forEach(this::deleteFile);
+ }
+ }
- sourceFilePath = makePathSystemIndependent(sourceFilePath);
- String[] pathSplitted = sourceFilePath.split("/");
- String filename = pathSplitted[pathSplitted.length - 1];
+ @SneakyThrows
+ private File getFileFromResource(Resource outputDirectoryResource) {
+ return outputDirectoryResource.getFile();
+ }
+
+ private String getFilenameWithoutExtension(File outputFilename) {
+ return getFilenameWithoutExtension(outputFilename.getAbsolutePath());
+ }
+
+ private String getFilenameWithoutExtension(String file) {
+ String filename = getFilenameFromPath(file);
+ return filename.contains(".") ? filename.substring(0, filename.lastIndexOf(".")) : filename;
+ }
+
+ private void manageHpanFiles(String file, String path, boolean isComplete) {
+ if (isComplete && "DELETE".equals(manageHpanOnSuccess)) {
+ deleteFile(new File(path));
+ } else if (!isComplete || "ARCHIVE".equals(manageHpanOnSuccess)) {
+ archiveFile(file, path, isComplete);
+ }
+ }
+
+ private void archiveFile(String file, String path, boolean isCompleted) {
+ log.info("Archiving processed file: {}", file);
+ try {
+ File destinationFile = getDestinationFileByStatus(file, isCompleted);
+ FileUtils.moveFile(FileUtils.getFile(path), destinationFile);
+ } catch (IOException e) {
+ log.error("File {} cannot be moved to the destination path. Reason: {}", path, e.getMessage());
+ }
+ }
+
+ @SneakyThrows
+ private File getDestinationFileByStatus(String sourceFilePath, boolean isCompleted) {
+
+ String filename = getFilenameFromPath(sourceFilePath);
String destinationPath;
if (isCompleted) {
String archivalPath = resolver.getResources(successPath)[0].getFile().getAbsolutePath();
- DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyyMMddHHmmssSSS");
- destinationPath = archivalPath + File.separator + UUID.randomUUID().toString().replace("-", "").substring(0, 20) +
- "_" + OffsetDateTime.now().format(fmt) + "_" + filename;
+ destinationPath = archivalPath + File.separator + addRandomPrefixToFilename(filename);
} else {
- String archivalPath = resolver.getResources(uploadPendingPath)[0].getFile().getAbsolutePath();
- destinationPath = archivalPath + File.separator + filename;
+ String archivalPath = resolver.getResources(errorPath)[0].getFile().getAbsolutePath();
+ destinationPath = archivalPath + File.separator + addRandomPrefixToFilename(filename);
}
return FileUtils.getFile(destinationPath);
}
+ private String getFilenameFromPath(String path) {
+ String[] pathSplitted = makePathSystemIndependent(path).split("/");
+ return pathSplitted[pathSplitted.length - 1];
+ }
+
+ private String addRandomPrefixToFilename(String filename) {
+ DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyyMMddHHmmssSSS");
+ return UUID.randomUUID().toString().replace("-", "").substring(0, 20) +
+ "_" + OffsetDateTime.now().format(fmt) + "_" + filename;
+ }
+
@SneakyThrows
private void deleteEmptyLogFiles() {
if (logsDirectory == null) {
@@ -244,7 +325,7 @@ private void deleteEmptyLogFiles() {
}
FileUtils.listFiles(
- resolver.getResources(logsDirectory)[0].getFile(), new String[]{"csv"},false)
+ resolver.getResources(logsDirectory)[0].getFile(), new String[]{"csv"},false)
.stream()
.filter(file -> FileUtils.sizeOf(file) == 0)
.forEach(file -> {
@@ -252,4 +333,4 @@ private void deleteEmptyLogFiles() {
FileUtils.deleteQuietly(file);
});
}
}
diff --git a/api/batch/src/main/resources/config/transactionFilterBatch.properties b/api/batch/src/main/resources/config/transactionFilterBatch.properties
index 77e4bb80..a0aecddf 100644
--- a/api/batch/src/main/resources/config/transactionFilterBatch.properties
+++ b/api/batch/src/main/resources/config/transactionFilterBatch.properties
@@ -21,3 +21,6 @@ batchConfiguration.TransactionFilterBatch.transactionFilter.deleteProcessedFiles
batchConfiguration.TransactionFilterBatch.transactionFilter.deleteOutputFiles=${ACQ_BATCH_DELETE_OUTPUT_FILE:ALWAYS}
batchConfiguration.TransactionFilterBatch.transactionFilter.manageHpanOnSuccess=${ACQ_BATCH_HPAN_ON_SUCCESS:DELETE}
batchConfiguration.TransactionFilterBatch.isolationForCreate=${ACQ_BATCH_TRX_ISOLATION_FOR_CREATE:ISOLATION_SERIALIZABLE}
+batchConfiguration.TransactionFilterBatch.fileReportRecovery.directoryPath=${ACQ_BATCH_FILE_REPORTS_PATH:resources/reports}
+batchConfiguration.TransactionFilterBatch.fileReportRecovery.enabled=${ACQ_BATCH_FILE_REPORT_RECOVERY_ENABLED:true}
+batchConfiguration.TransactionFilterBatch.fileReportRecovery.fileNamePrefix=${ACQ_BATCH_FILE_REPORTS_PREFIX:report-tae}
diff --git a/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchAdeSplittingTest.java b/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchAdeSplittingTest.java
index 91c6a200..47a1884d 100644
--- a/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchAdeSplittingTest.java
+++ b/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchAdeSplittingTest.java
@@ -99,6 +99,8 @@
"batchConfiguration.TransactionFilterBatch.transactionSenderRtd.enabled=false",
"batchConfiguration.TransactionFilterBatch.transactionSenderAde.enabled=false",
"batchConfiguration.TransactionFilterBatch.senderAdeAckFilesRecovery.enabled=false",
+ "batchConfiguration.TransactionFilterBatch.fileReportRecovery.directoryPath=classpath:/test-encrypt/reports",
+ "batchConfiguration.TransactionFilterBatch.fileReportRecovery.enabled=false",
"batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableAfterProcessLogging=true",
"batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableAfterProcessFileLogging=true",
"batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableOnReadErrorLogging=true",
diff --git a/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchFileReportTest.java b/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchFileReportTest.java
new file mode 100644
index 00000000..e84cbd10
--- /dev/null
+++ b/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchFileReportTest.java
@@ -0,0 +1,507 @@
+package it.gov.pagopa.rtd.transaction_filter.batch;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.times;
+import static org.springframework.transaction.annotation.Propagation.NOT_SUPPORTED;
+
+import it.gov.pagopa.rtd.transaction_filter.batch.TransactionFilterBatch;
+import it.gov.pagopa.rtd.transaction_filter.batch.config.TestConfig;
+import it.gov.pagopa.rtd.transaction_filter.batch.encryption.EncryptUtil;
+import it.gov.pagopa.rtd.transaction_filter.batch.step.TransactionFilterStep;
+import it.gov.pagopa.rtd.transaction_filter.connector.AbiToFiscalCodeRestClient;
+import it.gov.pagopa.rtd.transaction_filter.connector.FileReportRestClient;
+import it.gov.pagopa.rtd.transaction_filter.connector.HpanRestClient;
+import it.gov.pagopa.rtd.transaction_filter.connector.HpanRestClient.SasScope;
+import it.gov.pagopa.rtd.transaction_filter.connector.SasResponse;
+import it.gov.pagopa.rtd.transaction_filter.connector.SenderAdeAckRestClient;
+import it.gov.pagopa.rtd.transaction_filter.connector.model.FileMetadata;
+import it.gov.pagopa.rtd.transaction_filter.connector.model.FileReport;
+import it.gov.pagopa.rtd.transaction_filter.service.StoreService;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.time.LocalDateTime;
+import java.time.OffsetDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+import lombok.SneakyThrows;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.filefilter.IOFileFilter;
+import org.apache.commons.io.filefilter.WildcardFileFilter;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.junit.runner.RunWith;
+import org.mockito.BDDMockito;
+import org.mockito.Mockito;
+import org.springframework.batch.core.ExitStatus;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.JobParametersBuilder;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.test.JobLauncherTestUtils;
+import org.springframework.batch.test.JobRepositoryTestUtils;
+import org.springframework.batch.test.context.SpringBatchTest;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.boot.autoconfigure.jackson.JacksonAutoConfiguration;
+import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
+import org.springframework.boot.test.mock.mockito.MockBean;
+import org.springframework.boot.test.mock.mockito.SpyBean;
+import org.springframework.cloud.openfeign.FeignAutoConfiguration;
+import org.springframework.core.io.Resource;
+import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
+import org.springframework.test.annotation.DirtiesContext;
+import org.springframework.test.annotation.DirtiesContext.ClassMode;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
+import org.springframework.test.context.jdbc.Sql;
+import org.springframework.test.context.junit4.SpringRunner;
+import org.springframework.transaction.annotation.Transactional;
+
+@RunWith(SpringRunner.class)
+@SpringBatchTest
+@EnableAutoConfiguration
+@DataJpaTest
+@Transactional(propagation = NOT_SUPPORTED)
+@Sql({
+ "classpath:org/springframework/batch/core/schema-drop-hsqldb.sql",
+ "classpath:org/springframework/batch/core/schema-hsqldb.sql"})
+@ContextConfiguration(classes = {
+ TestConfig.class,
+ JacksonAutoConfiguration.class,
+ TransactionFilterBatch.class,
+ FeignAutoConfiguration.class
+})
+@TestPropertySource(
+ properties = {
+ "spring.main.allow-bean-definition-overriding=true",
+ "batchConfiguration.TransactionFilterBatch.panList.secretKeyPath=classpath:/test-encrypt/secretKey.asc",
+ "batchConfiguration.TransactionFilterBatch.panList.passphrase=test",
+ "batchConfiguration.TransactionFilterBatch.panList.skipLimit=0",
+ "batchConfiguration.TransactionFilterBatch.panList.hpanDirectoryPath=classpath:/test-encrypt/**/hpan/*pan*.pgp",
+ "batchConfiguration.TransactionFilterBatch.panList.linesToSkip=0",
+ "batchConfiguration.TransactionFilterBatch.panList.applyDecrypt=true",
+ "batchConfiguration.TransactionFilterBatch.panList.applyHashing=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.transactionDirectoryPath=classpath:/test-encrypt/**/transactions/",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.outputDirectoryPath=classpath:/test-encrypt/output",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.transactionLogsPath=classpath:/test-encrypt/errorLogs",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.applyHashing=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.timestampPattern=MM/dd/yyyy HH:mm:ss",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.applyEncrypt=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.saveHashing=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.deleteProcessedFiles=false",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.deleteOutputFiles=ERROR",
+ "batchConfiguration.TransactionFilterBatch.successArchivePath=classpath:/test-encrypt/success",
+ "batchConfiguration.TransactionFilterBatch.errorArchivePath=classpath:/test-encrypt/error",
+ "batchConfiguration.TransactionFilterBatch.pendingArchivePath=classpath:/test-encrypt/output/pending",
+ "batchConfiguration.TransactionFilterBatch.saltRecovery.enabled=false",
+ "batchConfiguration.TransactionFilterBatch.pagopaPublicKeyRecovery.enabled=false",
+ "batchConfiguration.TransactionFilterBatch.hpanListRecovery.enabled=false",
+ "batchConfiguration.TransactionFilterBatch.abiToFiscalCodeMapRecovery.enabled=false",
+ "batchConfiguration.TransactionFilterBatch.transactionSenderRtd.enabled=false",
+ "batchConfiguration.TransactionFilterBatch.transactionSenderAde.enabled=false",
+ "batchConfiguration.TransactionFilterBatch.transactionSenderPending.enabled=false",
+ "batchConfiguration.TransactionFilterBatch.senderAdeAckFilesRecovery.enabled=false",
+ "batchConfiguration.TransactionFilterBatch.senderAdeAckFilesRecovery.directoryPath=classpath:/test-encrypt/sender-ade-ack",
+ "batchConfiguration.TransactionFilterBatch.fileReportRecovery.directoryPath=classpath:/test-encrypt/reports",
+ "batchConfiguration.TransactionFilterBatch.fileReportRecovery.enabled=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableAfterProcessLogging=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableAfterProcessFileLogging=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableOnReadErrorLogging=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableOnReadErrorFileLogging=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableAfterProcessFileLogging=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableOnProcessErrorLogging=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableOnProcessErrorFileLogging=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableOnWriteErrorLogging=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableOnWriteErrorFileLogging=true",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.loggingFrequency=100",
+ "batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.writerPoolSize=5",
+ "batchConfiguration.TransactionFilterBatch.transactionWriterAde.splitThreshold=1000"
+ }
+)
+@DirtiesContext(classMode = ClassMode.AFTER_EACH_TEST_METHOD)
+public class TransactionFilterBatchFileReportTest {
+
+ @Autowired
+ private JobLauncherTestUtils jobLauncherTestUtils;
+
+ @Autowired
+ private JobRepositoryTestUtils jobRepositoryTestUtils;
+
+ @MockBean
+ private FileReportRestClient fileReportRestClient;
+
+ @SpyBean
+ StoreService storeServiceSpy;
+
+ @Rule
+ public TemporaryFolder tempFolder = new TemporaryFolder(new File(getClass().getResource("/test-encrypt").getFile()));
+
+ PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
+
+ @SneakyThrows
+ @Before
+ public void setUp() {
+ Mockito.reset(storeServiceSpy);
+
+ deleteFiles("classpath:/test-encrypt/errorLogs/*.csv");
+ deleteFiles("classpath:/test-encrypt/output/*.pgp");
+ deleteFiles("classpath:/test-encrypt/sender-ade-ack/*.csv");
+ deleteFiles("classpath:/test-encrypt/output/*.csv");
+ deleteFiles("classpath:/test-encrypt/reports/*.csv");
+ }
+
+ @SneakyThrows
+ @After
+ public void tearDown() {
+ tempFolder.delete();
+ }
+
+ @SneakyThrows
+ private void deleteFiles(String classpath) {
+ Resource[] resources = resolver.getResources(classpath);
+ for (Resource resource : resources) {
+ resource.getFile().delete();
+ }
+ }
+
+ @After
+ public void cleanUp() {
+ jobRepositoryTestUtils.removeJobExecutions();
+ }
+
+ @SneakyThrows
+ @Test
+ public void jobExecutionProducesExpectedFiles() {
+ LocalDateTime currentDate = LocalDateTime.now();
+ String publicKey = createPublicKey();
+ BDDMockito.doReturn(publicKey).when(storeServiceSpy).getKey("pagopa");
+ BDDMockito.doReturn(getStubFileReport(currentDate)).when(fileReportRestClient).getFileReport();
+ createPanPGP();
+
+ File outputFileTrn = createTrnOutputFile();
+ File outputFileAde = createAdeOutputFile();
+
+ // Check that the job exited with the right exit status
+ JobExecution jobExecution = jobLauncherTestUtils.launchJob(new JobParametersBuilder()
+ .addDate("startDateTime", new Date())
+ .toJobParameters());
+
+ Assert.assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
+
+ // Check that the HPAN store has been accessed as expected
+ BDDMockito.verify(storeServiceSpy, times(3)).store(any());
+ BDDMockito.verify(storeServiceSpy, times(4)).hasHpan(any());
+ BDDMockito.verify(storeServiceSpy, times(2)).getKey(any());
+
+ // Check that output folder contains expected files, and only those
+ Collection outputPgpFiles = getOutputPgpFiles();
+ Assert.assertEquals(2, outputPgpFiles.size());
+
+ Set outputPgpFilenames = outputPgpFiles.stream().map(File::getName).collect(Collectors.toSet());
+ Set expectedPgpFilenames = getExpectedPgpFilenames();
+ Assert.assertEquals(expectedPgpFilenames, outputPgpFilenames);
+
+ Set outputCsvFilenames = getOutputCsvFiles().stream().map(File::getName).collect(Collectors.toSet());
+ Set expectedCsvFilenames = getExpectedCsvFileNames();
+ assertThat(outputCsvFilenames).containsAll(expectedCsvFilenames);
+
+ List outputFileTrnContent = Files.readAllLines(outputFileTrn.toPath().toAbsolutePath());
+ List outputFileAdeContent = Files.readAllLines(outputFileAde.toPath().toAbsolutePath());
+
+ // Check that output files contain expected lines
+ Set expectedOutputFileTrnContent = getExpectedTrnOutputFileContent();
+ Set expectedOutputFileAdeContent = getExpectedAdeOutputFileContent();
+
+ Assert.assertEquals(expectedOutputFileTrnContent, new HashSet<>(outputFileTrnContent));
+ Assert.assertEquals("#sha256sum:8bca0fdabf06e1c30b716224c67a5753ac5d999cf6a375ac7adba16f725f2046",
+ outputFileTrnContent.get(0));
+ Assert.assertEquals(expectedOutputFileAdeContent, new HashSet<>(outputFileAdeContent));
+ Assert.assertEquals("#sha256sum:8bca0fdabf06e1c30b716224c67a5753ac5d999cf6a375ac7adba16f725f2046",
+ outputFileAdeContent.get(0));
+
+ // Check that encrypted output files have the same content as the unencrypted ones
+ File trxEncFile = outputPgpFiles.stream().filter(p -> p.getName().equals("CSTAR.99999.TRNLOG.20220204.094652.001.csv.pgp")).collect(Collectors.toList()).iterator().next();
+
+ FileInputStream trxEncFileIS = new FileInputStream(trxEncFile);
+ FileInputStream secretFilePathIS = null;
+ try {
+ String secretKeyPath = "file:/" + this.getClass().getResource("/test-encrypt").getFile() + "/secretKey.asc";
+ Resource secretKeyResource = resolver.getResource(secretKeyPath);
+
+ secretFilePathIS = new FileInputStream(secretKeyResource.getFile());
+ byte[] trxEncFileDecryptedFileData = EncryptUtil.decryptFile(trxEncFileIS, secretFilePathIS, "test".toCharArray());
+ File trxEncFileDecryptedFile = tempFolder.newFile("trxEncFileDecrypted.csv");
+ FileUtils.writeByteArrayToFile(trxEncFileDecryptedFile, trxEncFileDecryptedFileData);
+
+ List trxEncFileDecryptedFileContent = Files.readAllLines(trxEncFileDecryptedFile.toPath().toAbsolutePath());
+ Assert.assertEquals(expectedOutputFileTrnContent, new HashSet<>(trxEncFileDecryptedFileContent));
+ } finally {
+ trxEncFileIS.close();
+ secretFilePathIS.close();
+ }
+
+ // Check that logs folder contains expected files
+ Collection outputLogsFiles = FileUtils.listFiles(
+ resolver.getResources("classpath:/test-encrypt/errorLogs")[0].getFile(), new String[]{"csv"}, false);
+ Assert.assertEquals(2, outputLogsFiles.size());
+
+ FileFilter fileFilter = new WildcardFileFilter("*_Rtd__FilteredRecords_CSTAR.99999.TRNLOG.20220204.094652.001.csv");
+ Collection trxFilteredFiles = FileUtils.listFiles(resolver.getResources("classpath:/test-encrypt/errorLogs")[0].getFile(), (IOFileFilter) fileFilter, null);
+ Assert.assertEquals(1, trxFilteredFiles.size());
+
+ fileFilter = new WildcardFileFilter("*_Ade__FilteredRecords_CSTAR.99999.TRNLOG.20220204.094652.001.csv");
+ Collection adeFilteredFiles = FileUtils.listFiles(resolver.getResources("classpath:/test-encrypt/errorLogs")[0].getFile(), (IOFileFilter) fileFilter, null);
+ Assert.assertEquals(1, adeFilteredFiles.size());
+
+ // empty log files get deleted
+ fileFilter = new WildcardFileFilter("*_Rtd__ErrorRecords_CSTAR.99999.TRNLOG.20220204.094652.001.csv");
+ Collection trxErrorFiles = FileUtils.listFiles(resolver.getResources("classpath:/test-encrypt/errorLogs")[0].getFile(), (IOFileFilter) fileFilter, null);
+ Assert.assertEquals(0, trxErrorFiles.size());
+
+ fileFilter = new WildcardFileFilter("*_Ade__ErrorRecords_CSTAR.99999.TRNLOG.20220204.094652.001.csv");
+ Collection adeErrorFiles = FileUtils.listFiles(resolver.getResources("classpath:/test-encrypt/errorLogs")[0].getFile(), (IOFileFilter) fileFilter, null);
+ Assert.assertEquals(0, adeErrorFiles.size());
+
+ // Check that log files contain expected lines
+ File trxFilteredFile = trxFilteredFiles.iterator().next();
+ List trxFilteredContent = Files.readAllLines(trxFilteredFile.toPath().toAbsolutePath());
+ Assert.assertEquals(2, trxFilteredContent.size());
+ Assert.assertTrue(trxFilteredContent.contains("99999;00;01;pan4;03/20/2020 13:23:00;4444444444;8888;;3333;978;4444;0000;1;000002;5422;fis123;12345678901;00;par4"));
+ Assert.assertTrue(trxFilteredContent.contains("99999;00;01;pan5;2020-03-20T13:23:00;555555555;9999;;3333;978;4444;0000;1;000002;5422;fis123;12345678901;00;"));
+
+ File adeFilteredFile = adeFilteredFiles.iterator().next();
+ List adeFilteredContent = Files.readAllLines(adeFilteredFile.toPath().toAbsolutePath());
+ Assert.assertEquals(1, adeFilteredContent.size());
+ Assert.assertTrue(adeFilteredContent.contains("99999;00;01;pan5;2020-03-20T13:23:00;555555555;9999;;3333;978;4444;0000;1;000002;5422;fis123;12345678901;00;"));
+
+ Collection fileReportSaved = getFileReportSaved();
+ assertThat(fileReportSaved).isNotNull().hasSize(1);
+
+ List fileReportContent = Files.readAllLines(fileReportSaved.stream().findAny()
+ .orElse(new File("")).toPath());
+ assertThat(fileReportContent).isNotNull().containsExactly("name;status;size;transmissionDate",
+ "file1;RECEIVED;200;" + currentDate,
+ "file2;RECEIVED;300;" + currentDate.minusDays(4),
+ "file3;RECEIVED;400;" + currentDate.minusDays(10));
+ }
+
+ @SneakyThrows
+ @Test
+ public void givenAReportWhenLaunchFileReportStepThenSaveTheReportOnFile() {
+ LocalDateTime currentDate = LocalDateTime.now();
+ BDDMockito.doReturn(getStubFileReport(currentDate)).when(fileReportRestClient).getFileReport();
+
+ jobLauncherTestUtils.launchStep("file-report-recovery-step",
+ new JobParameters());
+
+ Mockito.verify(fileReportRestClient, times(1)).getFileReport();
+ Collection fileReportSaved = getFileReportSaved();
+
+ assertThat(fileReportSaved).isNotNull().hasSize(1);
+
+ List fileReportContent = Files.readAllLines(fileReportSaved.stream().findAny()
+ .orElse(new File("")).toPath());
+
+ assertThat(fileReportContent).isNotNull().containsExactly("name;status;size;transmissionDate",
+ "file1;RECEIVED;200;" + currentDate,
+ "file2;RECEIVED;300;" + currentDate.minusDays(4),
+ "file3;RECEIVED;400;" + currentDate.minusDays(10));
+ }
+
+ @SneakyThrows
+ @Test
+ public void givenEmptyReportWhenLaunchFileReportStepThenSaveTheReportWithHeaderOnly() {
+ BDDMockito.doReturn(getStubEmptyReport()).when(fileReportRestClient).getFileReport();
+
+ jobLauncherTestUtils.launchStep("file-report-recovery-step",
+ new JobParameters());
+
+ Mockito.verify(fileReportRestClient, times(1)).getFileReport();
+ Collection fileReportSaved = getFileReportSaved();
+
+ assertThat(fileReportSaved).isNotNull().hasSize(1);
+
+ List fileReportContent = Files.readAllLines(fileReportSaved.stream().findAny()
+ .orElse(new File("")).toPath());
+
+ assertThat(fileReportContent).isNotNull().contains("name;status;size;transmissionDate");
+ }
+
+ @SneakyThrows
+ @Test
+ public void givenMalformedReportWhenLaunchFileReportStepThenSaveTheReportWithHeaderOnly() {
+ // returns report with null field list
+ BDDMockito.doReturn(new FileReport()).when(fileReportRestClient).getFileReport();
+
+ jobLauncherTestUtils.launchStep("file-report-recovery-step",
+ new JobParameters());
+
+ Mockito.verify(fileReportRestClient, times(1)).getFileReport();
+ Collection fileReportSaved = getFileReportSaved();
+
+ assertThat(fileReportSaved).isNotNull().hasSize(1);
+
+ List fileReportContent = Files.readAllLines(fileReportSaved.stream().findAny()
+ .orElse(new File("")).toPath());
+
+ assertThat(fileReportContent).isNotNull().contains("name;status;size;transmissionDate");
+ }
+
+ @SneakyThrows
+ private Collection getFileReportSaved() {
+ return FileUtils.listFiles(resolver.getResources("classpath:/test-encrypt/reports")[0].getFile(), new String[]{"csv"}, false);
+ }
+
+ private FileReport getStubFileReport(LocalDateTime dateTime) {
+ FileReport fileReport = new FileReport();
+ List files = new ArrayList<>();
+
+ FileMetadata fileMetadata = new FileMetadata();
+ fileMetadata.setName("file1");
+ fileMetadata.setSize(200L);
+ fileMetadata.setTransmissionDate(dateTime);
+ fileMetadata.setStatus("RECEIVED");
+ files.add(fileMetadata);
+
+ fileMetadata = new FileMetadata();
+ fileMetadata.setName("file2");
+ fileMetadata.setSize(300L);
+ fileMetadata.setTransmissionDate(dateTime.minusDays(4));
+ fileMetadata.setStatus("RECEIVED");
+ files.add(fileMetadata);
+
+ fileMetadata = new FileMetadata();
+ fileMetadata.setName("file3");
+ fileMetadata.setSize(400L);
+ fileMetadata.setTransmissionDate(dateTime.minusDays(10));
+ fileMetadata.setStatus("RECEIVED");
+ files.add(fileMetadata);
+
+ fileReport.setFilesRecentlyUploaded(files);
+
+ return fileReport;
+ }
+
+ private FileReport getStubEmptyReport() {
+ FileReport fileReport = new FileReport();
+ fileReport.setFilesRecentlyUploaded(Collections.emptyList());
+
+ return fileReport;
+ }
+
+ private String createPublicKey() throws IOException {
+ String publicKeyPath = "file:/" + Objects.requireNonNull(
+ this.getClass().getResource("/test-encrypt")).getFile() + "/publicKey.asc";
+ Resource publicKeyResource = resolver.getResource(publicKeyPath);
+ FileInputStream publicKeyFilePathIS = new FileInputStream(publicKeyResource.getFile());
+ return IOUtils.toString(publicKeyFilePathIS);
+ }
+
+ @SneakyThrows
+ private void createPanPGP() {
+ tempFolder.newFolder("hpan");
+ File panPgp = tempFolder.newFile("hpan/pan.pgp");
+
+ FileOutputStream panPgpFOS = new FileOutputStream(panPgp);
+
+ EncryptUtil.encryptFile(panPgpFOS,
+ Objects.requireNonNull(this.getClass().getResource("/test-encrypt/pan")).getFile() + "/pan.csv",
+ EncryptUtil.readPublicKey(
+ this.getClass().getResourceAsStream("/test-encrypt/publicKey.asc")),
+ false, false);
+
+ panPgpFOS.close();
+ }
+
+ @SneakyThrows
+ private File createAdeOutputFile() {
+ File outputFileAde = new File(resolver.getResource("classpath:/test-encrypt/output")
+ .getFile().getAbsolutePath() + "/ADE.99999.TRNLOG.20220204.094652.001.01.csv");
+
+ outputFileAde.createNewFile();
+ return outputFileAde;
+ }
+
+ @SneakyThrows
+ private File createTrnOutputFile() {
+ File outputFileTrn = new File(resolver.getResource("classpath:/test-encrypt/output")
+ .getFile().getAbsolutePath() + "/CSTAR.99999.TRNLOG.20220204.094652.001.csv");
+
+ outputFileTrn.createNewFile();
+ return outputFileTrn;
+ }
+
+ @SneakyThrows
+ private Collection getOutputPgpFiles() {
+ return FileUtils.listFiles(
+ resolver.getResources("classpath:/test-encrypt/output")[0].getFile(), new String[]{"pgp"}, false);
+ }
+
+ private Set getExpectedPgpFilenames() {
+ Set expectedPgpFilenames = new HashSet<>();
+ expectedPgpFilenames.add("CSTAR.99999.TRNLOG.20220204.094652.001.csv.pgp");
+ expectedPgpFilenames.add("ADE.99999.TRNLOG.20220204.094652.001.01.csv.pgp");
+ return expectedPgpFilenames;
+ }
+
+ @SneakyThrows
+ private Collection getOutputCsvFiles() {
+ return FileUtils.listFiles(
+ resolver.getResources("classpath:/test-encrypt/output")[0].getFile(), new String[]{"csv"}, false);
+ }
+
+ private Set getExpectedCsvFileNames() {
+ Set expectedCsvFilenames = new HashSet<>();
+ expectedCsvFilenames.add("CSTAR.99999.TRNLOG.20220204.094652.001.csv");
+ expectedCsvFilenames.add("ADE.99999.TRNLOG.20220204.094652.001.01.csv");
+
+ return expectedCsvFilenames;
+ }
+
+ private Set getExpectedTrnOutputFileContent() {
+ Set expectedOutputFileTrnContent = new HashSet<>();
+ expectedOutputFileTrnContent.add("#sha256sum:8bca0fdabf06e1c30b716224c67a5753ac5d999cf6a375ac7adba16f725f2046");
+ expectedOutputFileTrnContent.add("99999;00;00;28aa47c8c6cd1a6b0a86ebe18471295796c88269868825b4cd41f94f0a07e88e;03/20/2020 10:50:33;1111111111;5555;;1111;978;22222;0000;1;000002;5422;fis123;12345678901;00;");
+ expectedOutputFileTrnContent.add("99999;00;01;e2df0a82ac0aa12921c398e1eba9119772db868650ebef22b8919fa0fb7642ed;03/20/2020 11:23:00;333333333;7777;;3333;978;4444;0000;1;000002;5422;fis123;12345678901;00;");
+ expectedOutputFileTrnContent.add("99999;01;00;805f89015f85948f7d7bdd57a0a81e4cd95fc81bdd1195a69c4ab139f0ebed7b;03/20/2020 11:04:53;2222222222;6666;;2222;978;3333;0000;1;000002;5422;fis123;12345678901;00;par2");
+ return expectedOutputFileTrnContent;
+ }
+
+ private Set getExpectedAdeOutputFileContent() {
+ String transmissionDate = getDateFormattedAsString();
+
+ Set expectedOutputFileAdeContent = new HashSet<>();
+ expectedOutputFileAdeContent.add("#sha256sum:8bca0fdabf06e1c30b716224c67a5753ac5d999cf6a375ac7adba16f725f2046");
+ expectedOutputFileAdeContent.add("99999;00;" + transmissionDate + ";03/20/2020;2;6666;978;4444;0000;1;fis123;12345678901;00");
+ expectedOutputFileAdeContent.add("99999;01;" + transmissionDate + ";03/20/2020;1;2222;978;3333;0000;1;fis123;12345678901;00");
+ expectedOutputFileAdeContent.add("99999;00;" + transmissionDate + ";03/20/2020;1;1111;978;22222;0000;1;fis123;12345678901;00");
+
+ return expectedOutputFileAdeContent;
+ }
+ private String getDateFormattedAsString() {
+ DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd");
+ return OffsetDateTime.now().format(fmt);
+ }
+}
\ No newline at end of file
diff --git a/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchInputFileChecksumDisabledTest.java b/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchInputFileChecksumDisabledTest.java
index 637f6562..608e4a49 100644
--- a/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchInputFileChecksumDisabledTest.java
+++ b/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchInputFileChecksumDisabledTest.java
@@ -98,6 +98,8 @@
"batchConfiguration.TransactionFilterBatch.transactionSenderRtd.enabled=false",
"batchConfiguration.TransactionFilterBatch.transactionSenderAde.enabled=false",
"batchConfiguration.TransactionFilterBatch.senderAdeAckFilesRecovery.enabled=false",
+ "batchConfiguration.TransactionFilterBatch.fileReportRecovery.directoryPath=classpath:/test-encrypt/reports",
+ "batchConfiguration.TransactionFilterBatch.fileReportRecovery.enabled=false",
"batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableAfterProcessLogging=true",
"batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableAfterProcessFileLogging=true",
"batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableOnReadErrorLogging=true",
diff --git a/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchTest.java b/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchTest.java
index 64e54a73..4763cd22 100644
--- a/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchTest.java
+++ b/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchTest.java
@@ -13,6 +13,8 @@
import it.gov.pagopa.rtd.transaction_filter.connector.HpanRestClient.SasScope;
import it.gov.pagopa.rtd.transaction_filter.connector.SasResponse;
import it.gov.pagopa.rtd.transaction_filter.connector.SenderAdeAckRestClient;
+import it.gov.pagopa.rtd.transaction_filter.connector.model.FileMetadata;
+import it.gov.pagopa.rtd.transaction_filter.connector.model.FileReport;
import it.gov.pagopa.rtd.transaction_filter.service.StoreService;
import java.io.File;
import java.io.FileFilter;
@@ -21,11 +23,13 @@
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
+import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
+import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
@@ -118,6 +122,8 @@
"batchConfiguration.TransactionFilterBatch.transactionSenderPending.enabled=false",
"batchConfiguration.TransactionFilterBatch.senderAdeAckFilesRecovery.enabled=true",
"batchConfiguration.TransactionFilterBatch.senderAdeAckFilesRecovery.directoryPath=classpath:/test-encrypt/sender-ade-ack",
+ "batchConfiguration.TransactionFilterBatch.fileReportRecovery.directoryPath=classpath:/test-encrypt/reports",
+ "batchConfiguration.TransactionFilterBatch.fileReportRecovery.enabled=false",
"batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableAfterProcessLogging=true",
"batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableAfterProcessFileLogging=true",
"batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableOnReadErrorLogging=true",
@@ -198,7 +204,6 @@ public void jobExecutionProducesExpectedFiles() {
String publicKey = createPublicKey();
BDDMockito.doReturn(publicKey).when(storeServiceSpy).getKey("pagopa");
-
createPanPGP();
File outputFileTrn = createTrnOutputFile();
@@ -529,7 +534,6 @@ List createSenderAdeAckFiles() {
Files.write(secondFile.toPath(), secondFileContent);
files.add(secondFile);
- files.forEach(file -> System.out.println(file.getAbsolutePath()));
return files;
}
}
\ No newline at end of file
diff --git a/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchWrongInputTest.java b/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchWrongInputTest.java
index 0525df25..61750179 100644
--- a/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchWrongInputTest.java
+++ b/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/TransactionFilterBatchWrongInputTest.java
@@ -81,6 +81,8 @@
"batchConfiguration.TransactionFilterBatch.transactionSenderAde.enabled=false",
"batchConfiguration.TransactionFilterBatch.transactionSenderRtd.enabled=false",
"batchConfiguration.TransactionFilterBatch.senderAdeAckFilesRecovery.enabled=false",
+ "batchConfiguration.TransactionFilterBatch.fileReportRecovery.directoryPath=classpath:/test-encrypt/reports",
+ "batchConfiguration.TransactionFilterBatch.fileReportRecovery.enabled=false",
"batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableAfterProcessLogging=true",
"batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableAfterProcessFileLogging=true",
"batchConfiguration.TransactionFilterBatch.transactionFilter.readers.listener.enableOnReadErrorLogging=true",
diff --git a/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/step/tasklet/FileManagementTaskletTest.java b/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/step/tasklet/FileManagementTaskletTest.java
index 8081dc94..dccc8cb7 100644
--- a/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/step/tasklet/FileManagementTaskletTest.java
+++ b/api/batch/src/test/java/it/gov/pagopa/rtd/transaction_filter/batch/step/tasklet/FileManagementTaskletTest.java
@@ -2,17 +2,23 @@
import static org.assertj.core.api.Assertions.assertThat;
+import it.gov.pagopa.rtd.transaction_filter.batch.model.DeleteOutputFilesEnum;
import java.io.File;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
+import java.util.regex.Pattern;
import lombok.SneakyThrows;
import org.apache.commons.io.FileUtils;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.assertj.core.api.Condition;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepContribution;
@@ -23,7 +29,7 @@
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
-public class FileManagementTaskletTest {
+class FileManagementTaskletTest {
File successFile;
File errorFile;
@@ -31,601 +37,461 @@ public class FileManagementTaskletTest {
File errorHpanFile;
File outputFileCsv;
- @Rule
- public TemporaryFolder tempFolder = new TemporaryFolder(
- new File(getClass().getResource("/test-encrypt").getFile()));
+ @TempDir
+ Path tempDir;
+ PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
+
+ private final String OUTPUT_PATH = "test/output";
+ private final String PENDING_PATH = "test/output/pending";
+ private final String SUCCESS_PATH = "test/success";
+ private final String ERROR_PATH = "test/error";
+ private final String HPAN_PATH = "test/hpan";
+ private final String TRANSACTIONS_PATH = "test/trxs";
+ private final String LOGS_PATH = "test/logs";
+
+ @SneakyThrows
@Test
- public void testFileManagement_NoDeleteLocalFiles() {
-
- try {
-
- tempFolder.newFolder("test1");
- tempFolder.newFolder("test1","success");
- tempFolder.newFolder("test1","error");
- tempFolder.newFolder("test1","output");
- tempFolder.newFolder("test1","hpan");
- tempFolder.newFolder("test1","trxs");
-
- successFile = tempFolder.newFile("test1/trxs/success-trx.pgp");
- errorFile = tempFolder.newFile("test1/trxs/error-trx.pgp");
- hpanFile = tempFolder.newFile("test1/hpan/hpan.pgp");
- errorHpanFile = tempFolder.newFile("test1/hpan/error-hpan.pgp");
- tempFolder.newFile("test1/output/error-trx-output-file.pgp");
- tempFolder.newFile("test1/output/success-trx-output-file.pgp");
- tempFolder.newFile("test1/output/error-trx-output-file.csv");
- outputFileCsv = tempFolder.newFile("test1/output/success-trx-output-file.csv");
-
- PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
-
- FileManagementTasklet archivalTasklet = new FileManagementTasklet();
- archivalTasklet.setUploadPendingPath("classpath:/test-encrypt/**/test1/error");
- archivalTasklet.setSuccessPath("classpath:/test-encrypt/**/test1/success");
- archivalTasklet.setOutputDirectory("classpath:/test-encrypt/**/test1/output");
- archivalTasklet.setHpanDirectory("file:/"+resolver.getResources(
- "classpath:/test-encrypt/**/test1/hpan")[0].getFile().getAbsolutePath()+"/*.pgp");
- archivalTasklet.setDeleteProcessedFiles(false);
- archivalTasklet.setDeleteOutputFiles("NEVER");
- archivalTasklet.setManageHpanOnSuccess("DELETE");
-
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/success")[0].getFile(),
- new String[]{"pgp"},false).size());
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/error")[0].getFile(),
- new String[]{"pgp"},false).size());
- StepExecution execution = MetaDataInstanceFactory.createStepExecution();
-
- List stepExecutions = new ArrayList<>();
-
- StepExecution stepExecution1 = MetaDataInstanceFactory.createStepExecution("A",1L);
- stepExecution1.setStatus(BatchStatus.COMPLETED);
- stepExecution1.getExecutionContext().put("fileName", successFile.getAbsolutePath());
- stepExecutions.add(stepExecution1);
-
- StepExecution stepExecution2 = MetaDataInstanceFactory.createStepExecution("B", 1L);
- stepExecution2.setStatus(BatchStatus.FAILED);
- stepExecution2.getExecutionContext().put("fileName", errorFile.getAbsolutePath());
- stepExecutions.add(stepExecution2);
-
- StepExecution stepExecution3 = MetaDataInstanceFactory.createStepExecution("C", 1L);
- stepExecution3.setStatus(BatchStatus.COMPLETED);
- stepExecution3.getExecutionContext().put("fileName", hpanFile.getAbsolutePath());
- stepExecutions.add(stepExecution3);
-
- StepExecution stepExecution4 = MetaDataInstanceFactory.createStepExecution("D", 1L);
- stepExecution4.setStatus(BatchStatus.FAILED);
- stepExecution4.getExecutionContext().put("fileName", errorHpanFile.getAbsolutePath());
- stepExecutions.add(stepExecution4);
-
- StepContext stepContext = new StepContext(execution);
- stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
- ChunkContext chunkContext = new ChunkContext(stepContext);
-
- archivalTasklet.execute(new StepContribution(execution),chunkContext);
-
- Assert.assertEquals(1,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/success")[0].getFile(),
- new String[]{"pgp"},false).size());
- Assert.assertEquals(2,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/error")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- successFile.createNewFile();
-
- stepExecutions = new ArrayList<>();
-
- StepExecution stepExecution5 = MetaDataInstanceFactory.createStepExecution("E", 1L);
- stepExecution5.setStatus(BatchStatus.COMPLETED);
- stepExecution5.getExecutionContext().put("fileName", successFile.getAbsolutePath());
- stepExecutions.add(stepExecution5);
-
- StepExecution stepExecution6 = MetaDataInstanceFactory.createStepExecution("F", 1L);
- stepExecution6.setStatus(BatchStatus.COMPLETED);
- stepExecution6.getExecutionContext().put("fileName",outputFileCsv.getAbsolutePath());
- stepExecutions.add(stepExecution6);
-
- execution = MetaDataInstanceFactory.createStepExecution();
- stepContext = new StepContext(execution);
- stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
- chunkContext = new ChunkContext(stepContext);
-
- archivalTasklet.execute(new StepContribution(execution),chunkContext);
-
- Assert.assertEquals(2,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/success")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- Assert.assertEquals(2,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/output")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- Assert.assertEquals(2,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/output")[0].getFile(),
- new String[]{"csv"},false).size());
-
- } catch (Exception e) {
- e.printStackTrace();
- Assert.fail();
- }
+ void testFileManagement_NoDeleteLocalFiles() {
+
+ createDefaultDirectories();
+
+ successFile = Files.createFile(tempDir.resolve(TRANSACTIONS_PATH + "/success-trx.csv")).toFile();
+ errorFile = Files.createFile(tempDir.resolve(TRANSACTIONS_PATH + "/error-trx.csv")).toFile();
+ hpanFile = Files.createFile(tempDir.resolve(HPAN_PATH + "/hpan.pgp")).toFile();
+ errorHpanFile = Files.createFile(tempDir.resolve(HPAN_PATH + "/error-hpan.pgp")).toFile();
+ Files.createFile(tempDir.resolve(OUTPUT_PATH + "/error-trx-output-file.pgp"));
+ Files.createFile(tempDir.resolve(OUTPUT_PATH + "/success-trx-output-file.pgp"));
+ Files.createFile(tempDir.resolve(OUTPUT_PATH + "/error-trx-output-file.csv"));
+ outputFileCsv = Files.createFile(tempDir.resolve(OUTPUT_PATH + "/success-trx-output-file.csv")).toFile();
+
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
+ archivalTasklet.setDeleteProcessedFiles(false);
+ archivalTasklet.setDeleteOutputFiles(DeleteOutputFilesEnum.KEEP.name());
+ archivalTasklet.setManageHpanOnSuccess("DELETE");
+
+ assertThat(getSuccessFiles()).isEmpty();
+ assertThat(getErrorFiles()).isEmpty();
+
+ StepExecution execution = MetaDataInstanceFactory.createStepExecution();
+ List stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution1 = createStepExecution("INPUT_OK",BatchStatus.COMPLETED, "file:" + successFile.getAbsolutePath());
+ stepExecutions.add(stepExecution1);
+
+ StepExecution stepExecution2 = createStepExecution("INPUT_FAILED", BatchStatus.FAILED, "file:" + errorFile.getAbsolutePath());
+ stepExecutions.add(stepExecution2);
+
+ StepExecution stepExecution3 = createStepExecution("HPAN_OK", BatchStatus.COMPLETED, "file:" + hpanFile.getAbsolutePath());
+ stepExecutions.add(stepExecution3);
+
+ StepExecution stepExecution4 = createStepExecution("HPAN_FAILED", BatchStatus.FAILED, "file:" + errorHpanFile.getAbsolutePath());
+ stepExecutions.add(stepExecution4);
+
+ StepExecution stepExecution6 = createStepExecution("OUTPUT_CSV_OK", BatchStatus.COMPLETED, "file:" + outputFileCsv.getAbsolutePath());
+ stepExecutions.add(stepExecution6);
+
+ StepContext stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ ChunkContext chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ assertThat(getSuccessFiles()).hasSize(1)
+ .extracting(File::getName).has(getNamePrefixCondition());
+ assertThat(getErrorFiles()).hasSize(2)
+ .extracting(File::getName).has(getNamePrefixCondition());
+
+ successFile.createNewFile();
+
+ stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution5 = createStepExecution("INPUT_OK", BatchStatus.COMPLETED, "file:" + successFile.getAbsolutePath());
+ stepExecutions.add(stepExecution5);
+
+ execution = MetaDataInstanceFactory.createStepExecution();
+ stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ assertThat(getSuccessFiles()).hasSize(2);
+ assertThat(getCsvOutputFiles()).hasSize(2);
+ assertThat(getPgpOutputFiles()).hasSize(2);
}
+ @SneakyThrows
@Test
- public void testFileManagement_NoDeleteLocalFiles_WithSkips() {
-
- try {
-
- tempFolder.newFolder("test1");
- tempFolder.newFolder("test1","success");
- tempFolder.newFolder("test1","error");
- tempFolder.newFolder("test1","output");
- tempFolder.newFolder("test1","hpan");
- tempFolder.newFolder("test1","trxs");
-
- successFile = tempFolder.newFile("test1/trxs/success-trx.pgp");
- errorFile = tempFolder.newFile("test1/trxs/error-trx.pgp");
- hpanFile = tempFolder.newFile("test1/hpan/hpan.pgp");
- errorHpanFile = tempFolder.newFile("test1/hpan/error-hpan.pgp");
- tempFolder.newFile("test1/output/error-trx-output-file.pgp");
- tempFolder.newFile("test1/output/success-trx-output-file.pgp");
- tempFolder.newFile("test1/output/error-trx-output-file.csv");
-
- PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
-
- FileManagementTasklet archivalTasklet = new FileManagementTasklet();
- archivalTasklet.setUploadPendingPath("classpath:/test-encrypt/**/test1/error");
- archivalTasklet.setSuccessPath("classpath:/test-encrypt/**/test1/success");
- archivalTasklet.setOutputDirectory("classpath:/test-encrypt/**/test1/output");
- archivalTasklet.setHpanDirectory("file:/"+resolver.getResources(
- "classpath:/test-encrypt/**/test1/hpan")[0].getFile().getAbsolutePath()+"/*.pgp");
- archivalTasklet.setDeleteProcessedFiles(false);
- archivalTasklet.setDeleteOutputFiles("NEVER");
- archivalTasklet.setManageHpanOnSuccess("DELETE");
-
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/success")[0].getFile(),
- new String[]{"pgp"},false).size());
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/error")[0].getFile(),
- new String[]{"pgp"},false).size());
- StepExecution execution = MetaDataInstanceFactory.createStepExecution();
-
- List stepExecutions = new ArrayList<>();
-
- StepExecution stepExecution1 = MetaDataInstanceFactory.createStepExecution("A",1L);
- stepExecution1.setStatus(BatchStatus.COMPLETED);
- stepExecution1.setExitStatus(new ExitStatus("COMPLETED WITH SKIPS"));
- stepExecution1.getExecutionContext().put("fileName", successFile.getAbsolutePath());
- stepExecutions.add(stepExecution1);
-
- StepExecution stepExecution2 = MetaDataInstanceFactory.createStepExecution("B", 1L);
- stepExecution2.setStatus(BatchStatus.FAILED);
- stepExecution2.getExecutionContext().put("fileName", errorFile.getAbsolutePath());
- stepExecutions.add(stepExecution2);
-
- StepExecution stepExecution3 = MetaDataInstanceFactory.createStepExecution("C", 1L);
- stepExecution3.setStatus(BatchStatus.COMPLETED);
- stepExecution3.getExecutionContext().put("fileName", hpanFile.getAbsolutePath());
- stepExecutions.add(stepExecution3);
-
- StepExecution stepExecution4 = MetaDataInstanceFactory.createStepExecution("D", 1L);
- stepExecution4.setStatus(BatchStatus.FAILED);
- stepExecution4.getExecutionContext().put("fileName", errorHpanFile.getAbsolutePath());
- stepExecutions.add(stepExecution4);
-
- StepContext stepContext = new StepContext(execution);
- stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
- ChunkContext chunkContext = new ChunkContext(stepContext);
-
- archivalTasklet.execute(new StepContribution(execution),chunkContext);
-
- Assert.assertEquals(1,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/success")[0].getFile(),
- new String[]{"pgp"},false).size());
- Assert.assertEquals(2,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/error")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- successFile.createNewFile();
-
- stepExecutions = new ArrayList<>();
-
- StepExecution stepExecution5 = MetaDataInstanceFactory.createStepExecution("E", 1L);
- stepExecution5.setStatus(BatchStatus.COMPLETED);
- stepExecution5.getExecutionContext().put("fileName",successFile.getAbsolutePath());
- stepExecutions.add(stepExecution5);
-
- execution = MetaDataInstanceFactory.createStepExecution();
- stepContext = new StepContext(execution);
- stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
- chunkContext = new ChunkContext(stepContext);
-
- archivalTasklet.execute(new StepContribution(execution),chunkContext);
-
- Assert.assertEquals(2,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/success")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- Assert.assertEquals(2,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/output")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- Assert.assertEquals(1,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test1/output")[0].getFile(),
- new String[]{"csv"},false).size());
-
- } catch (Exception e) {
- e.printStackTrace();
- Assert.fail();
- }
+ void testFileManagement_NoDeleteLocalFiles_WithSkips() {
+
+ createDefaultDirectories();
+
+ successFile = Files.createFile(tempDir.resolve(TRANSACTIONS_PATH + "/success-trx.csv")).toFile();
+ errorFile = Files.createFile(tempDir.resolve(TRANSACTIONS_PATH + "/error-trx.csv")).toFile();
+ hpanFile = Files.createFile(tempDir.resolve(HPAN_PATH + "/hpan.pgp")).toFile();
+ errorHpanFile = Files.createFile(tempDir.resolve(HPAN_PATH + "/error-hpan.pgp")).toFile();
+ Files.createFile(tempDir.resolve(OUTPUT_PATH + "/error-trx-output-file.pgp"));
+ Files.createFile(tempDir.resolve(OUTPUT_PATH + "/success-trx-output-file.pgp"));
+ Files.createFile(tempDir.resolve(OUTPUT_PATH + "/error-trx-output-file.csv"));
+
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
+ archivalTasklet.setDeleteProcessedFiles(false);
+ archivalTasklet.setDeleteOutputFiles(DeleteOutputFilesEnum.KEEP.name());
+ archivalTasklet.setManageHpanOnSuccess("DELETE");
+
+ assertThat(getSuccessFiles()).isEmpty();
+ assertThat(getErrorFiles()).isEmpty();
+
+ StepExecution execution = MetaDataInstanceFactory.createStepExecution();
+ List stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution1 = createStepExecution("INPUT_OK",BatchStatus.COMPLETED, "file:" + successFile.getAbsolutePath());
+ stepExecution1.setExitStatus(new ExitStatus("COMPLETED WITH SKIPS"));
+ stepExecutions.add(stepExecution1);
+
+ StepExecution stepExecution2 = createStepExecution("INPUT_FAILED", BatchStatus.FAILED, "file:" + errorFile.getAbsolutePath());
+ stepExecutions.add(stepExecution2);
+
+ StepExecution stepExecution3 = createStepExecution("HPAN_OK", BatchStatus.COMPLETED, "file:" + hpanFile.getAbsolutePath());
+ stepExecutions.add(stepExecution3);
+
+ StepExecution stepExecution4 = createStepExecution("HPAN_FAILED", BatchStatus.FAILED, "file:" + errorHpanFile.getAbsolutePath());
+ stepExecutions.add(stepExecution4);
+
+ StepContext stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ ChunkContext chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ assertThat(getSuccessFiles()).hasSize(1);
+ assertThat(getErrorFiles()).hasSize(2);
+
+ successFile.createNewFile();
+
+ stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution5 = createStepExecution("INPUT_OK", BatchStatus.COMPLETED, "file:" + successFile.getAbsolutePath());
+ stepExecutions.add(stepExecution5);
+
+ execution = MetaDataInstanceFactory.createStepExecution();
+ stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ assertThat(getSuccessFiles()).hasSize(2);
+ assertThat(getCsvOutputFiles()).hasSize(1);
+ assertThat(getPgpOutputFiles()).hasSize(2);
}
+ @SneakyThrows
@Test
- public void testFileManagement_DeleteLocalFiles() {
-
- try {
-
- tempFolder.newFolder("test2");
- tempFolder.newFolder("test2","success");
- tempFolder.newFolder("test2","error");
- tempFolder.newFolder("test2","output");
- tempFolder.newFolder("test2","hpan");
- tempFolder.newFolder("test2","trxs");
-
- successFile = tempFolder.newFile("test2/trxs/success-trx.pgp");
- errorFile = tempFolder.newFile("test2/trxs/error-trx.pgp");
- hpanFile = tempFolder.newFile("test2/hpan/hpan.pgp");
- errorHpanFile = tempFolder.newFile("test2/hpan/error-hpan.pgp");
- tempFolder.newFile("test2/output/error-trx-output-file.pgp");
- tempFolder.newFile("test2/output/success-trx-output-file.pgp");
- tempFolder.newFile("test2/output/error-trx-output-file.csv");
-
- FileManagementTasklet archivalTasklet = new FileManagementTasklet();
- archivalTasklet.setUploadPendingPath("classpath:/test-encrypt/**/test2/error");
- archivalTasklet.setSuccessPath("classpath:/test-encrypt/**/test2/success");
- archivalTasklet.setOutputDirectory("classpath:/test-encrypt/**/test2/output");
- archivalTasklet.setHpanDirectory("classpath:/test-encrypt/**/test2/hpan");
- archivalTasklet.setDeleteProcessedFiles(true);
- archivalTasklet.setDeleteOutputFiles("ALWAYS");
- archivalTasklet.setManageHpanOnSuccess("DELETE");
-
- PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test2/success")[0].getFile(),
- new String[]{"pgp"},false).size());
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test2/error")[0].getFile(),
- new String[]{"pgp"},false).size());
- StepExecution execution = MetaDataInstanceFactory.createStepExecution();
-
- List stepExecutions = new ArrayList<>();
-
- StepExecution stepExecution1 = MetaDataInstanceFactory.createStepExecution("A",1L);
- stepExecution1.setStatus(BatchStatus.COMPLETED);
- stepExecution1.getExecutionContext().put("fileName", successFile.getAbsolutePath());
- stepExecutions.add(stepExecution1);
-
- StepExecution stepExecution2 = MetaDataInstanceFactory.createStepExecution("B", 1L);
- stepExecution2.setStatus(BatchStatus.FAILED);
- stepExecution2.getExecutionContext().put("fileName", errorFile.getAbsolutePath());
- stepExecutions.add(stepExecution2);
-
- StepExecution stepExecution3 = MetaDataInstanceFactory.createStepExecution("C", 1L);
- stepExecution3.setStatus(BatchStatus.COMPLETED);
- stepExecution3.getExecutionContext().put("fileName", hpanFile.getAbsolutePath());
- stepExecutions.add(stepExecution3);
-
- StepExecution stepExecution4 = MetaDataInstanceFactory.createStepExecution("D", 1L);
- stepExecution4.setStatus(BatchStatus.FAILED);
- stepExecution4.getExecutionContext().put("fileName", errorHpanFile.getAbsolutePath());
- stepExecutions.add(stepExecution4);
-
- StepContext stepContext = new StepContext(execution);
- stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
- ChunkContext chunkContext = new ChunkContext(stepContext);
-
- archivalTasklet.execute(new StepContribution(execution),chunkContext);
-
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test2/success")[0].getFile(),
- new String[]{"pgp"},false).size());
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test2/error")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- successFile.createNewFile();
-
- stepExecutions = new ArrayList<>();
-
- StepExecution stepExecution5 = MetaDataInstanceFactory.createStepExecution("E", 1L);
- stepExecution5.setStatus(BatchStatus.COMPLETED);
- stepExecution5.getExecutionContext().put("fileName",successFile.getAbsolutePath());
- stepExecutions.add(stepExecution5);
-
- execution = MetaDataInstanceFactory.createStepExecution();
- stepContext = new StepContext(execution);
- stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
- chunkContext = new ChunkContext(stepContext);
-
- archivalTasklet.execute(new StepContribution(execution),chunkContext);
-
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test2/success")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test2/output")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- } catch (Exception e) {
- e.printStackTrace();
- Assert.fail();
- }
+ void testFileManagement_DeleteLocalFiles() {
+
+ createDefaultDirectories();
+
+ successFile = Files.createFile(tempDir.resolve(TRANSACTIONS_PATH + "/success-trx.csv")).toFile();
+ errorFile = Files.createFile(tempDir.resolve(TRANSACTIONS_PATH + "/error-trx.csv")).toFile();
+ hpanFile = Files.createFile(tempDir.resolve(HPAN_PATH + "/hpan.pgp")).toFile();
+ errorHpanFile = Files.createFile(tempDir.resolve(HPAN_PATH + "/error-hpan.pgp")).toFile();
+ File pgpFileFailed = Files.createFile(tempDir.resolve(OUTPUT_PATH + "/error-trx-output-file.pgp")).toFile();
+ Files.createFile(tempDir.resolve(OUTPUT_PATH + "/success-trx-output-file.pgp"));
+ File csvOutputFile = Files.createFile(tempDir.resolve(OUTPUT_PATH + "/error-trx-output-file.csv")).toFile();
+
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
+ archivalTasklet.setDeleteProcessedFiles(true);
+ archivalTasklet.setDeleteOutputFiles(DeleteOutputFilesEnum.ALWAYS.name());
+ archivalTasklet.setManageHpanOnSuccess("DELETE");
+
+ assertThat(getSuccessFiles()).isEmpty();
+ assertThat(getErrorFiles()).isEmpty();
+
+ StepExecution execution = MetaDataInstanceFactory.createStepExecution();
+ List stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution1 = createStepExecution("INPUT_OK",BatchStatus.COMPLETED, "file:" + successFile.getAbsolutePath());
+ stepExecutions.add(stepExecution1);
+
+ StepExecution stepExecution2 = createStepExecution("INPUT_FAILED", BatchStatus.FAILED, "file:" + errorFile.getAbsolutePath());
+ stepExecutions.add(stepExecution2);
+
+ StepExecution stepExecution3 = createStepExecution("HPAN_OK", BatchStatus.COMPLETED, "file:" + hpanFile.getAbsolutePath());
+ stepExecutions.add(stepExecution3);
+
+ StepExecution stepExecution4 = createStepExecution("HPAN_FAILED", BatchStatus.FAILED, "file:" + errorHpanFile.getAbsolutePath());
+ stepExecutions.add(stepExecution4);
+
+ StepExecution stepExecution6 = createStepExecution("PGP_SEND_FAILED", BatchStatus.FAILED, "file:" + pgpFileFailed.getAbsolutePath());
+ stepExecutions.add(stepExecution6);
+
+ StepExecution stepExecution7 = createStepExecution("ENCRYPT_FILE_CSV", BatchStatus.FAILED, "file:" + csvOutputFile.getAbsolutePath());
+ stepExecutions.add(stepExecution7);
+
+ StepContext stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ ChunkContext chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ assertThat(getSuccessFiles()).isEmpty();
+ assertThat(getErrorFiles()).isEmpty();
+
+ successFile.createNewFile();
+
+ stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution5 = createStepExecution("INPUT_OK", BatchStatus.COMPLETED, "file:" + successFile.getAbsolutePath());
+ stepExecutions.add(stepExecution5);
+
+ execution = MetaDataInstanceFactory.createStepExecution();
+ stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ assertThat(getSuccessFiles()).isEmpty();
+ assertThat(getPgpOutputFiles()).isEmpty();
}
+ @SneakyThrows
@Test
- public void testFileManagement_DeleteOutputFilesOnErrors() {
-
- try {
-
- tempFolder.newFolder("test3");
- tempFolder.newFolder("test3","success");
- tempFolder.newFolder("test3","error");
- tempFolder.newFolder("test3","output");
- tempFolder.newFolder("test3","hpan");
- tempFolder.newFolder("test3","trxs");
-
- successFile = tempFolder.newFile("test3/trxs/success-trx.pgp");
- errorFile = tempFolder.newFile("test3/trxs/error-trx.pgp");
- hpanFile = tempFolder.newFile("test3/hpan/hpan.pgp");
- errorHpanFile = tempFolder.newFile("test3/hpan/error-hpan.pgp");
- tempFolder.newFile("test3/output/error-trx-output-file.pgp");
- tempFolder.newFile("test3/output/success-trx-output-file.pgp");
- tempFolder.newFile("test3/output/error-trx-output-file.csv");
-
- PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
- FileManagementTasklet archivalTasklet = new FileManagementTasklet();
- archivalTasklet.setUploadPendingPath("classpath:/test-encrypt/**/test3/error");
- archivalTasklet.setSuccessPath("classpath:/test-encrypt/**/test3/success");
- archivalTasklet.setOutputDirectory("classpath:/test-encrypt/**/test3/output");
- archivalTasklet.setHpanDirectory("file:/"+resolver.getResources(
- "classpath:/test-encrypt/**/test3/hpan")[0].getFile().getAbsolutePath()+"/*.pgp");
- archivalTasklet.setDeleteProcessedFiles(false);
- archivalTasklet.setDeleteOutputFiles("ERROR");
- archivalTasklet.setManageHpanOnSuccess("DELETE");
-
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test3/success")[0].getFile(),
- new String[]{"pgp"},false).size());
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test3/error")[0].getFile(),
- new String[]{"pgp"},false).size());
- StepExecution execution = MetaDataInstanceFactory.createStepExecution();
-
- List stepExecutions = new ArrayList<>();
-
- StepExecution stepExecution1 = MetaDataInstanceFactory.createStepExecution("A",1L);
- stepExecution1.setStatus(BatchStatus.COMPLETED);
- stepExecution1.getExecutionContext().put("fileName", successFile.getAbsolutePath());
- stepExecutions.add(stepExecution1);
-
- StepExecution stepExecution2 = MetaDataInstanceFactory.createStepExecution("B", 1L);
- stepExecution2.setStatus(BatchStatus.FAILED);
- stepExecution2.getExecutionContext().put("fileName", errorFile.getAbsolutePath());
- stepExecutions.add(stepExecution2);
-
- StepExecution stepExecution3 = MetaDataInstanceFactory.createStepExecution("C", 1L);
- stepExecution3.setStatus(BatchStatus.COMPLETED);
- stepExecution3.getExecutionContext().put("fileName", hpanFile.getAbsolutePath());
- stepExecutions.add(stepExecution3);
-
- StepExecution stepExecution4 = MetaDataInstanceFactory.createStepExecution("D", 1L);
- stepExecution4.setStatus(BatchStatus.FAILED);
- stepExecution4.getExecutionContext().put("fileName", errorHpanFile.getAbsolutePath());
- stepExecutions.add(stepExecution4);
-
- StepContext stepContext = new StepContext(execution);
- stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
- ChunkContext chunkContext = new ChunkContext(stepContext);
-
- archivalTasklet.execute(new StepContribution(execution),chunkContext);
-
- Assert.assertEquals(1,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test3/success")[0].getFile(),
- new String[]{"pgp"},false).size());
- Assert.assertEquals(2,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test3/error")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- successFile.createNewFile();
-
- stepExecutions = new ArrayList<>();
-
- StepExecution stepExecution5 = MetaDataInstanceFactory.createStepExecution("E", 1L);
- stepExecution5.setStatus(BatchStatus.COMPLETED);
- stepExecution5.getExecutionContext().put("fileName",successFile.getAbsolutePath());
- stepExecutions.add(stepExecution5);
-
- execution = MetaDataInstanceFactory.createStepExecution();
- stepContext = new StepContext(execution);
- stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
- chunkContext = new ChunkContext(stepContext);
-
- archivalTasklet.execute(new StepContribution(execution),chunkContext);
-
- Assert.assertEquals(2,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test3/success")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- Assert.assertEquals(1,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test3/output")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- } catch (Exception e) {
- e.printStackTrace();
- Assert.fail();
- }
+ void testFileManagement_DeleteOutputFilesOnErrors() {
+
+ createDefaultDirectories();
+
+ successFile = Files.createFile(tempDir.resolve(TRANSACTIONS_PATH + "/success-trx.csv")).toFile();
+ errorFile = Files.createFile(tempDir.resolve(TRANSACTIONS_PATH + "/CSTAR.12345.TRNLOG.20221010.123456.001.csv")).toFile();
+ File pgpFileFailed = Files.createFile(tempDir.resolve(OUTPUT_PATH + "/ADE.12345.20221010.123456.001.01.csv.pgp")).toFile();
+ File csvOutputFile = Files.createFile(tempDir.resolve(OUTPUT_PATH + "/ADE.12345.20221010.123456.001.01.csv")).toFile();
+ Files.createFile(tempDir.resolve(OUTPUT_PATH + "/success-trx-output-file.pgp"));
+ // test csv file deletion without the filename being explicitly handled by any step; only valid for RTD files right now
+ Files.createFile(tempDir.resolve(OUTPUT_PATH + "/CSTAR.12345.TRNLOG.20221010.123456.001.csv"));
+
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
+ archivalTasklet.setDeleteProcessedFiles(false);
+ archivalTasklet.setDeleteOutputFiles(DeleteOutputFilesEnum.ERROR.name());
+ archivalTasklet.setManageHpanOnSuccess("DELETE");
+
+ assertThat(getSuccessFiles()).isEmpty();
+ assertThat(getErrorFiles()).isEmpty();
+
+ StepExecution execution = MetaDataInstanceFactory.createStepExecution();
+ List stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution1 = createStepExecution("INPUT_OK",BatchStatus.COMPLETED, "file:" + successFile.getAbsolutePath());
+ stepExecutions.add(stepExecution1);
+
+ StepExecution stepExecution2 = createStepExecution("INPUT_FAILED", BatchStatus.FAILED, "file:" + errorFile.getAbsolutePath());
+ stepExecutions.add(stepExecution2);
+
+ StepExecution stepExecution3 = createStepExecution("PGP_SEND_FAILED", BatchStatus.FAILED, "file:" + pgpFileFailed.getAbsolutePath());
+ stepExecutions.add(stepExecution3);
+
+ StepExecution stepExecution4 = createStepExecution("ENCRYPT_FILE_CSV", BatchStatus.FAILED, "file:" + csvOutputFile.getAbsolutePath());
+ stepExecutions.add(stepExecution4);
+
+ StepContext stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ ChunkContext chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ assertThat(getSuccessFiles()).hasSize(1);
+ assertThat(getErrorFiles()).hasSize(1);
+ assertThat(getPgpPendingFiles()).hasSize(1);
+ assertThat(getCsvOutputFiles()).isEmpty();
+ assertThat(getPgpOutputFiles()).hasSize(1);
+
+ successFile.createNewFile();
+
+ stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution5 = createStepExecution("INPUT_OK", BatchStatus.COMPLETED, "file:" + successFile.getAbsolutePath());
+ stepExecutions.add(stepExecution5);
+
+ execution = MetaDataInstanceFactory.createStepExecution();
+ stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ assertThat(getSuccessFiles()).hasSize(2);
+ assertThat(getPgpOutputFiles()).hasSize(1);
+ }
+
+ @SneakyThrows
+ @Test
+ void testFileManagement_KeepHpanInLocation() {
+
+ createDefaultDirectories();
+
+ successFile = Files.createFile(tempDir.resolve(TRANSACTIONS_PATH + "/success-trx.csv")).toFile();
+ errorFile = Files.createFile(tempDir.resolve(TRANSACTIONS_PATH + "/error-trx.csv")).toFile();
+ hpanFile = Files.createFile(tempDir.resolve(HPAN_PATH + "/hpan.pgp")).toFile();
+ errorHpanFile = Files.createFile(tempDir.resolve(HPAN_PATH + "/error-hpan.pgp")).toFile();
+ File pgpFileFailed = Files.createFile(tempDir.resolve(OUTPUT_PATH + "/error-trx-output-file.pgp")).toFile();
+ Files.createFile(tempDir.resolve(OUTPUT_PATH + "/success-trx-output-file.pgp"));
+ Files.createFile(tempDir.resolve(OUTPUT_PATH + "/error-trx-output-file.csv"));
+
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
+ archivalTasklet.setDeleteProcessedFiles(false);
+ archivalTasklet.setDeleteOutputFiles(DeleteOutputFilesEnum.ERROR.name());
+ archivalTasklet.setManageHpanOnSuccess("KEEP");
+
+ assertThat(getSuccessFiles()).isEmpty();
+ assertThat(getHpanFiles()).hasSize(2);
+ assertThat(getErrorFiles()).isEmpty();
+
+ StepExecution execution = MetaDataInstanceFactory.createStepExecution();
+ List stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution1 = createStepExecution("INPUT_FILE_SUCCESS", BatchStatus.COMPLETED, "file:" + successFile.getAbsolutePath());
+ stepExecutions.add(stepExecution1);
+
+ StepExecution stepExecution2 = createStepExecution("INPUT_FILE_ERROR", BatchStatus.FAILED, "file:" + errorFile.getAbsolutePath());
+ stepExecutions.add(stepExecution2);
+
+ StepExecution stepExecution3 = createStepExecution("HPAN_OK", BatchStatus.COMPLETED, "file:" + hpanFile.getAbsolutePath());
+ stepExecutions.add(stepExecution3);
+
+ StepExecution stepExecution4 = createStepExecution("HPAN_FAILED", BatchStatus.FAILED, "file:" + errorHpanFile.getAbsolutePath());
+ stepExecutions.add(stepExecution4);
+
+ StepExecution stepExecution6 = createStepExecution("PGP_SEND_FAILED", BatchStatus.FAILED, "file:" + pgpFileFailed.getAbsolutePath());
+ stepExecutions.add(stepExecution6);
+
+ StepContext stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ ChunkContext chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ assertThat(getSuccessFiles()).hasSize(1);
+ assertThat(getHpanFiles()).hasSize(1);
+ assertThat(getErrorFiles()).hasSize(2);
+
+ successFile.createNewFile();
+
+ stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution5 = createStepExecution("INPUT_OK", BatchStatus.COMPLETED, "file:" + successFile.getAbsolutePath());
+ stepExecutions.add(stepExecution5);
+
+ execution = MetaDataInstanceFactory.createStepExecution();
+ stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ assertThat(getHpanFiles()).hasSize(1);
+ assertThat(getSuccessFiles()).hasSize(2);
+ assertThat(getPgpOutputFiles()).hasSize(1);
}
+ @SneakyThrows
@Test
- public void testFileManagement_KeepHpanInLocation() {
-
- try {
-
- tempFolder.newFolder("test4");
- tempFolder.newFolder("test4","success");
- tempFolder.newFolder("test4","error");
- tempFolder.newFolder("test4","output");
- tempFolder.newFolder("test4","hpan");
- tempFolder.newFolder("test4","trxs");
-
- successFile = tempFolder.newFile("test4/trxs/success-trx.pgp");
- errorFile = tempFolder.newFile("test4/trxs/error-trx.pgp");
- hpanFile = tempFolder.newFile("test4/hpan/hpan.pgp");
- errorHpanFile = tempFolder.newFile("test4/hpan/error-hpan.pgp");
- tempFolder.newFile("test4/output/error-trx-output-file.pgp");
- tempFolder.newFile("test4/output/success-trx-output-file.pgp");
- tempFolder.newFile("test4/output/error-trx-output-file.csv");
-
- PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
- FileManagementTasklet archivalTasklet = new FileManagementTasklet();
- archivalTasklet.setUploadPendingPath("classpath:/test-encrypt/**/test4/error");
- archivalTasklet.setSuccessPath("classpath:/test-encrypt/**/test4/success");
- archivalTasklet.setOutputDirectory("classpath:/test-encrypt/**/test4/output");
- archivalTasklet.setHpanDirectory("file:/"+resolver.getResources(
- "classpath:/test-encrypt/**/hpan")[0].getFile().getAbsolutePath()+"/*.pgp");
- archivalTasklet.setDeleteProcessedFiles(false);
- archivalTasklet.setDeleteOutputFiles("ERROR");
- archivalTasklet.setManageHpanOnSuccess("KEEP");
-
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test4/success")[0].getFile(),
- new String[]{"pgp"},false).size());
- Assert.assertEquals(0,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test4/error")[0].getFile(),
- new String[]{"pgp"},false).size());
- StepExecution execution = MetaDataInstanceFactory.createStepExecution();
-
- List stepExecutions = new ArrayList<>();
-
- StepExecution stepExecution1 = MetaDataInstanceFactory.createStepExecution("A",1L);
- stepExecution1.setStatus(BatchStatus.COMPLETED);
- stepExecution1.getExecutionContext().put("fileName", successFile.getAbsolutePath());
- stepExecutions.add(stepExecution1);
-
- StepExecution stepExecution2 = MetaDataInstanceFactory.createStepExecution("B", 1L);
- stepExecution2.setStatus(BatchStatus.FAILED);
- stepExecution2.getExecutionContext().put("fileName", errorFile.getAbsolutePath());
- stepExecutions.add(stepExecution2);
-
- StepExecution stepExecution3 = MetaDataInstanceFactory.createStepExecution("C", 1L);
- stepExecution3.setStatus(BatchStatus.COMPLETED);
- stepExecution3.getExecutionContext().put("fileName", hpanFile.getAbsolutePath());
- stepExecutions.add(stepExecution3);
-
- StepExecution stepExecution4 = MetaDataInstanceFactory.createStepExecution("D", 1L);
- stepExecution4.setStatus(BatchStatus.FAILED);
- stepExecution4.getExecutionContext().put("fileName", errorHpanFile.getAbsolutePath());
- stepExecutions.add(stepExecution4);
-
- StepContext stepContext = new StepContext(execution);
- stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
- ChunkContext chunkContext = new ChunkContext(stepContext);
-
- archivalTasklet.execute(new StepContribution(execution),chunkContext);
-
- Assert.assertEquals(1,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test4/success")[0].getFile(),
- new String[]{"pgp"},false).size());
- Assert.assertEquals(2,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test4/error")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- successFile.createNewFile();
-
- stepExecutions = new ArrayList<>();
-
- StepExecution stepExecution5 = MetaDataInstanceFactory.createStepExecution("E", 1L);
- stepExecution5.setStatus(BatchStatus.COMPLETED);
- stepExecution5.getExecutionContext().put("fileName",successFile.getAbsolutePath());
- stepExecutions.add(stepExecution5);
-
- execution = MetaDataInstanceFactory.createStepExecution();
- stepContext = new StepContext(execution);
- stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
- chunkContext = new ChunkContext(stepContext);
-
- archivalTasklet.execute(new StepContribution(execution),chunkContext);
-
- Assert.assertEquals(1,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test4/hpan")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- Assert.assertEquals(2,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test4/success")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- Assert.assertEquals(1,
- FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test4/output")[0].getFile(),
- new String[]{"pgp"},false).size());
-
- } catch (Exception e) {
- e.printStackTrace();
- Assert.fail();
+ void testFileManagement_ArchiveHpan() {
+
+ createDefaultDirectories();
+
+ hpanFile = Files.createFile(tempDir.resolve(HPAN_PATH + "/hpan.pgp")).toFile();
+ errorHpanFile = Files.createFile(tempDir.resolve(HPAN_PATH + "/error-hpan.pgp")).toFile();
+
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
+ archivalTasklet.setDeleteProcessedFiles(false);
+ archivalTasklet.setDeleteOutputFiles(DeleteOutputFilesEnum.ERROR.name());
+ archivalTasklet.setManageHpanOnSuccess("ARCHIVE");
+ archivalTasklet.afterPropertiesSet();
+
+ assertThat(getHpanFiles()).hasSize(2);
+
+ StepExecution execution = MetaDataInstanceFactory.createStepExecution();
+ List stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution1 = createStepExecution("HPAN_OK", BatchStatus.COMPLETED, "file:" + hpanFile.getAbsolutePath());
+ stepExecutions.add(stepExecution1);
+
+ StepExecution stepExecution2 = createStepExecution("HPAN_FAILED", BatchStatus.FAILED, "file:" + errorHpanFile.getAbsolutePath());
+ stepExecutions.add(stepExecution2);
+
+ StepContext stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ ChunkContext chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ assertThat(getSuccessFiles()).hasSize(1)
+ .extracting(File::getName).has(getNamePrefixCondition());
+ assertThat(getHpanFiles()).isEmpty();
+ assertThat(getErrorFiles()).hasSize(1)
+ .extracting(File::getName).has(getNamePrefixCondition());
+ }
+
+ @SneakyThrows
+ @EnumSource(DeleteOutputFilesEnum.class)
+ @ParameterizedTest
+ void givenDeleteOutputFilesPolicyWhenRunFileManagementThenMovePgpOutputFilesToPendingFolder(DeleteOutputFilesEnum deleteOutputFilesFlag) {
+
+ createDefaultDirectories();
+
+ File outputFilePgp = Files.createFile(tempDir.resolve(OUTPUT_PATH + File.separator + "trx-output-file.pgp")).toFile();
+ File outputFileCsv = Files.createFile(tempDir.resolve(OUTPUT_PATH + File.separator + "trx-output-file.csv")).toFile();
+
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
+ archivalTasklet.setDeleteProcessedFiles(false);
+ archivalTasklet.setDeleteOutputFiles(deleteOutputFilesFlag.name());
+ archivalTasklet.setManageHpanOnSuccess("DELETE");
+
+ // pre-condition on initial setup
+ assertThat(getPgpOutputFiles()).hasSize(1);
+ assertThat(getCsvOutputFiles()).hasSize(1);
+
+ StepExecution execution = MetaDataInstanceFactory.createStepExecution();
+ List stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution1 = createStepExecution("SEND_PGP_STEP", BatchStatus.FAILED, "file:" + outputFilePgp.getAbsolutePath());
+ stepExecutions.add(stepExecution1);
+
+ StepExecution stepExecution2 = createStepExecution("ENCRYPT_AGGREGATE_STEP", BatchStatus.COMPLETED, "file:" + outputFileCsv.getAbsolutePath());
+ stepExecutions.add(stepExecution2);
+
+ StepContext stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ ChunkContext chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ // the file pgp has been moved from output to output/pending
+ assertThat(getPgpPendingFiles()).hasSize(1)
+ .extracting(File::getName).containsExactly(outputFilePgp.getName());
+ switch (deleteOutputFilesFlag) {
+ case KEEP:
+ assertThat(getCsvOutputFiles()).hasSize(1)
+ .extracting(File::getName).containsExactly(outputFileCsv.getName()); break;
+ case ERROR:
+ case ALWAYS:
+ assertThat(getCsvOutputFiles()).isEmpty();
}
}
@SneakyThrows
@Test
- public void whenLogFilesAreEmptyThenDeleteThem() {
- tempFolder.newFolder("test");
- tempFolder.newFolder("test","hpan");
- tempFolder.newFolder("test","logs");
+ void whenLogFilesAreEmptyThenDeleteThem() {
+ createDefaultDirectories();
- hpanFile = tempFolder.newFile("test/hpan/hpan.pgp");
+ hpanFile = Files.createFile(tempDir.resolve(HPAN_PATH + File.separator + "hpan.pgp")).toFile();
- tempFolder.newFile("test/logs/empty-log-file.csv");
- File logFileNotEmpty = tempFolder.newFile("test/logs/not-empty-log-file.csv");
- FileUtils.write(logFileNotEmpty, "this;is;a;not;empty;log");
+ Files.createFile(tempDir.resolve(LOGS_PATH + File.separator + "empty-log-file.csv"));
+ File logFileNotEmpty = Files.createFile(tempDir.resolve(LOGS_PATH + File.separator + "not-empty-log-file.csv")).toFile();
+ FileUtils.write(logFileNotEmpty, "this;is;a;not;empty;log", Charset.defaultCharset());
- PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
- FileManagementTasklet archivalTasklet = new FileManagementTasklet();
- archivalTasklet.setHpanDirectory("file:/"+resolver.getResources(
- "classpath:/test-encrypt/**/hpan")[0].getFile().getAbsolutePath()+"/*.pgp");
- archivalTasklet.setLogsDirectory("classpath:/test-encrypt/**/test/logs");
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
StepExecution execution = MetaDataInstanceFactory.createStepExecution();
StepContext stepContext = new StepContext(execution);
@@ -633,30 +499,24 @@ public void whenLogFilesAreEmptyThenDeleteThem() {
archivalTasklet.execute(new StepContribution(execution),chunkContext);
- Collection logFiles = FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test/logs")[0].getFile(),
- new String[]{"csv"},false);
+ Collection logFiles = getLogFiles();
assertThat(logFiles).isNotEmpty().hasSize(1).contains(logFileNotEmpty);
}
@SneakyThrows
@Test
- public void whenLogDirectoryIsNotSetThenTaskletDoNotDeleteEmptyLogs() {
- tempFolder.newFolder("test");
- tempFolder.newFolder("test","hpan");
- tempFolder.newFolder("test","logs");
+ void whenLogDirectoryIsNotSetThenTaskletDoNotDeleteEmptyLogs() {
+ createDefaultDirectories();
- hpanFile = tempFolder.newFile("test/hpan/hpan.pgp");
+ hpanFile = Files.createFile(tempDir.resolve(HPAN_PATH + File.separator + "hpan.pgp")).toFile();
- tempFolder.newFile("test/logs/empty-log-file.csv");
- File logFileNotEmpty = tempFolder.newFile("test/logs/not-empty-log-file.csv");
- FileUtils.write(logFileNotEmpty, "this;is;a;not;empty;log");
+ Files.createFile(tempDir.resolve(LOGS_PATH + File.separator + "empty-log-file.csv"));
+ File logFileNotEmpty = Files.createFile(tempDir.resolve(LOGS_PATH + File.separator + "not-empty-log-file.csv")).toFile();
+ FileUtils.write(logFileNotEmpty, "this;is;a;not;empty;log", Charset.defaultCharset());
- PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
- FileManagementTasklet archivalTasklet = new FileManagementTasklet();
- archivalTasklet.setHpanDirectory("file:/"+resolver.getResources(
- "classpath:/test-encrypt/**/hpan")[0].getFile().getAbsolutePath()+"/*.pgp");
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
+ archivalTasklet.setLogsDirectory(null);
StepExecution execution = MetaDataInstanceFactory.createStepExecution();
StepContext stepContext = new StepContext(execution);
@@ -664,16 +524,243 @@ public void whenLogDirectoryIsNotSetThenTaskletDoNotDeleteEmptyLogs() {
archivalTasklet.execute(new StepContribution(execution),chunkContext);
- Collection logFiles = FileUtils.listFiles(
- resolver.getResources("classpath:/test-encrypt/**/test/logs")[0].getFile(),
- new String[]{"csv"},false);
+ Collection logFiles = getLogFiles();
assertThat(logFiles).isNotEmpty().hasSize(2);
}
- @After
- public void tearDown() {
- tempFolder.delete();
+ @SneakyThrows
+ @Test
+ void whenThereAreMoreStepsWithSameFilenameThenEvaluateWorstStatus() {
+
+ createDefaultDirectories();
+
+ File inputFile = Files.createFile(tempDir.resolve(TRANSACTIONS_PATH + "/success-trx.csv")).toFile();
+
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
+ archivalTasklet.setDeleteProcessedFiles(false);
+ archivalTasklet.setDeleteOutputFiles(DeleteOutputFilesEnum.KEEP.name());
+ archivalTasklet.setManageHpanOnSuccess("KEEP");
+
+ assertThat(getSuccessFiles()).isEmpty();
+ assertThat(getErrorFiles()).isEmpty();
+
+ StepExecution execution = MetaDataInstanceFactory.createStepExecution();
+ List stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution1 = createStepExecution("INPUT_FILE_SUCCESS_EG_CHECKSUM", BatchStatus.COMPLETED, "file:" + inputFile.getAbsolutePath());
+ stepExecutions.add(stepExecution1);
+
+ StepExecution stepExecution2 = createStepExecution("INPUT_FILE_ERROR_EG_TRANSACTION_PROCESS", BatchStatus.FAILED, "file:" + inputFile.getAbsolutePath());
+ stepExecutions.add(stepExecution2);
+
+ StepContext stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ ChunkContext chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ // the FAILED status is evaluated and the file is moved into "error" folder
+ assertThat(getSuccessFiles()).isEmpty();
+ assertThat(getErrorFiles()).hasSize(1);
+
+ // invert the orders of the steps and retest
+ stepExecutions = new ArrayList<>();
+ stepExecution1 = createStepExecution("INPUT_FILE_ERROR_EG_TRANSACTION_PROCESS", BatchStatus.FAILED, "file:" + inputFile.getAbsolutePath());
+ stepExecutions.add(stepExecution1);
+
+ stepExecution2 = createStepExecution("INPUT_FILE_SUCCESS_EG_CHECKSUM", BatchStatus.COMPLETED, "file:" + inputFile.getAbsolutePath());
+ stepExecutions.add(stepExecution2);
+
+ stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ // the assertion must be step order-independent
+ assertThat(getSuccessFiles()).isEmpty();
+ assertThat(getErrorFiles()).hasSize(1);
}
+ @SneakyThrows
+ @EnumSource(DeleteOutputFilesEnum.class)
+ @ParameterizedTest
+ void givenPendingFilesWhenSendPendingStepFailThenFilesAreNotMoved(DeleteOutputFilesEnum deleteOutputFilesFlag) {
+
+ createDefaultDirectories();
+
+ File pendingFile = Files.createFile(tempDir.resolve(PENDING_PATH + File.separator + "file-to-send-again.pgp")).toFile();
+
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
+ archivalTasklet.setDeleteProcessedFiles(false);
+ archivalTasklet.setDeleteOutputFiles(deleteOutputFilesFlag.name());
+ archivalTasklet.setManageHpanOnSuccess("DELETE");
+
+ StepExecution execution = MetaDataInstanceFactory.createStepExecution();
+ List stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution1 = createStepExecution("SEND_PENDING_STEP", BatchStatus.FAILED, "file:" + pendingFile.getAbsolutePath());
+ stepExecutions.add(stepExecution1);
+
+ StepContext stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ ChunkContext chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ // the pending file must NOT be moved: it stays in output/pending after the failed send step
+ assertThat(getPgpPendingFiles()).hasSize(1);
+ }
+
+ @SneakyThrows
+ @EnumSource(DeleteOutputFilesEnum.class)
+ @ParameterizedTest
+ void givenPendingFilesWhenSendPendingStepSuccessThenFilesAreDeleted(DeleteOutputFilesEnum deleteOutputFilesFlag) {
+
+ createDefaultDirectories();
+
+ File pendingFile = Files.createFile(tempDir.resolve(PENDING_PATH + File.separator + "file-to-send-again.pgp")).toFile();
+
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
+ archivalTasklet.setDeleteProcessedFiles(false);
+ archivalTasklet.setDeleteOutputFiles(deleteOutputFilesFlag.name());
+ archivalTasklet.setManageHpanOnSuccess("DELETE");
+
+ StepExecution execution = MetaDataInstanceFactory.createStepExecution();
+ List stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution1 = createStepExecution("SEND_PENDING_STEP", BatchStatus.COMPLETED, "file:" + pendingFile.getAbsolutePath());
+ stepExecutions.add(stepExecution1);
+
+ StepContext stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ ChunkContext chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ // the pending file has been deleted after the SEND_PENDING_STEP completed successfully
+ assertThat(getPgpPendingFiles()).isEmpty();
+ }
+
+ @SneakyThrows
+ @Test
+ void givenOutputFilesFromPreviousRunsWhenDeleteOutputFilesIsAlwaysThenDoRemoveOldFilesToo() {
+ createDefaultDirectories();
+
+ Files.createFile(tempDir.resolve(OUTPUT_PATH + File.separator + "old-file.pgp"));
+ File outputFileToSend = Files.createFile(tempDir.resolve(OUTPUT_PATH + File.separator + "file-to-send.pgp")).toFile();
+
+ FileManagementTasklet archivalTasklet = createTaskletWithDefaultDirectories();
+ archivalTasklet.setDeleteProcessedFiles(false);
+ archivalTasklet.setDeleteOutputFiles(DeleteOutputFilesEnum.ALWAYS.name());
+ archivalTasklet.setManageHpanOnSuccess("DELETE");
+
+ assertThat(getPgpOutputFiles()).hasSize(2);
+
+ StepExecution execution = MetaDataInstanceFactory.createStepExecution();
+ List stepExecutions = new ArrayList<>();
+
+ StepExecution stepExecution1 = createStepExecution("SEND_OUTPUT_FILE", BatchStatus.COMPLETED, "file:" + outputFileToSend.getAbsolutePath());
+ stepExecutions.add(stepExecution1);
+
+ StepContext stepContext = new StepContext(execution);
+ stepContext.getStepExecution().getJobExecution().addStepExecutions(stepExecutions);
+ ChunkContext chunkContext = new ChunkContext(stepContext);
+
+ archivalTasklet.execute(new StepContribution(execution),chunkContext);
+
+ // with ALWAYS policy every pgp file, including ones from previous runs, is removed from output
+ assertThat(getPgpOutputFiles()).isEmpty();
+ }
+
+ private FileManagementTasklet createTaskletWithDefaultDirectories() {
+ FileManagementTasklet archivalTasklet = new FileManagementTasklet();
+ archivalTasklet.setUploadPendingPath("file:" + tempDir + File.separator + PENDING_PATH);
+ archivalTasklet.setSuccessPath("file:" + tempDir + File.separator + SUCCESS_PATH);
+ archivalTasklet.setOutputDirectory("file:" + tempDir + File.separator + OUTPUT_PATH);
+ archivalTasklet.setHpanDirectory("file:" + tempDir + File.separator + HPAN_PATH + "/*.pgp");
+ archivalTasklet.setLogsDirectory("file:" + tempDir + File.separator + LOGS_PATH);
+ archivalTasklet.setErrorPath("file:" + tempDir + File.separator + ERROR_PATH);
+
+ return archivalTasklet;
+ }
+
+ @SneakyThrows
+ private void createDefaultDirectories() {
+ // all working subdirectories are created under the test's temp directory root
+ Files.createDirectories(tempDir.resolve(PENDING_PATH));
+ Files.createDirectory(tempDir.resolve(SUCCESS_PATH));
+ Files.createDirectory(tempDir.resolve(ERROR_PATH));
+ Files.createDirectory(tempDir.resolve(HPAN_PATH));
+ Files.createDirectory(tempDir.resolve(TRANSACTIONS_PATH));
+ Files.createDirectory(tempDir.resolve(LOGS_PATH));
+ }
+
+ private StepExecution createStepExecution(String stepName, BatchStatus status, String filename) {
+ StepExecution stepExecution = MetaDataInstanceFactory.createStepExecution(stepName, 1L);
+ stepExecution.setStatus(status);
+ stepExecution.getExecutionContext().put("fileName", filename);
+ return stepExecution;
+ }
+
+ @SneakyThrows
+ private Collection getHpanFiles() {
+ return FileUtils.listFiles(
+ resolver.getResources("file:" + tempDir + File.separator + HPAN_PATH)[0].getFile(),
+ null,false);
+ }
+
+ @SneakyThrows
+ private Collection getSuccessFiles() {
+ return FileUtils.listFiles(
+ resolver.getResources("file:" + tempDir + File.separator + SUCCESS_PATH)[0].getFile(),
+ null,false);
+ }
+
+ @SneakyThrows
+ private Collection getErrorFiles() {
+ return FileUtils.listFiles(
+ resolver.getResources("file:" + tempDir + File.separator + ERROR_PATH)[0].getFile(),
+ null,false);
+ }
+
+ @SneakyThrows
+ private Collection getLogFiles() {
+ return FileUtils.listFiles(
+ resolver.getResources("file:" + tempDir + File.separator + LOGS_PATH)[0].getFile(),
+ new String[]{"csv"},false);
+ }
+
+ @SneakyThrows
+ private Collection getPgpOutputFiles() {
+ return FileUtils.listFiles(
+ resolver.getResources("file:" + tempDir + File.separator + OUTPUT_PATH)[0].getFile(),
+ new String[]{"pgp"},false);
+ }
+
+ @SneakyThrows
+ private Collection getCsvOutputFiles() {
+ return FileUtils.listFiles(
+ resolver.getResources("file:" + tempDir + File.separator + OUTPUT_PATH)[0].getFile(),
+ new String[]{"csv"},false);
+ }
+
+ private Collection getPgpPendingFiles() {
+ return FileUtils.listFiles(
+ new File(tempDir + File.separator + PENDING_PATH),
+ new String[]{"pgp"},false);
+ }
+
+ private Condition super List extends String>> getNamePrefixCondition() {
+ String prefixArchivedFileRegex = "[a-zA-Z\\d]{20}_[0-9]{17}_.*";
+ return new Condition<>(list -> list.stream().allMatch(name -> Pattern.matches(prefixArchivedFileRegex, name)),
+ "random name prefix");
+ }
+
+ @SneakyThrows
+ @AfterEach
+ void tearDown() {
+ FileUtils.forceDelete(tempDir.toFile());
+ }
}
\ No newline at end of file
diff --git a/api/batch/src/test/resources/test-encrypt/reports/dummy.csv b/api/batch/src/test/resources/test-encrypt/reports/dummy.csv
new file mode 100644
index 00000000..e69de29b
diff --git a/api/pom.xml b/api/pom.xml
index 026072d2..ea289897 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -4,12 +4,12 @@
rtd-ms-transaction-filter
it.gov.pagopa.rtd.ms
- 1.4.1
+ 1.5.0
it.gov.pagopa.rtd.ms.transaction_filter.api
rtd-ms-transaction-filter-api
- 1.4.1
+ 1.5.0
pom
diff --git a/app/pom.xml b/app/pom.xml
index d1801df3..d759180f 100644
--- a/app/pom.xml
+++ b/app/pom.xml
@@ -4,12 +4,12 @@
rtd-ms-transaction-filter
it.gov.pagopa.rtd.ms
- 1.4.1
+ 1.5.0
it.gov.pagopa.rtd.ms.transaction_filter
transaction-filter-app
- 1.4.1
+ 1.5.0
diff --git a/app/src/main/resources/config/application.yml b/app/src/main/resources/config/application.yml
index 980dcbf7..a3048457 100644
--- a/app/src/main/resources/config/application.yml
+++ b/app/src/main/resources/config/application.yml
@@ -106,11 +106,15 @@ batchConfiguration:
directoryPath: ${ACQ_BATCH_SENDER_ADEACK_OUTPUT_PATH:ade-errors}
transactionWriterAde:
splitThreshold: ${ACQ_BATCH_WRITER_ADE_SPLIT_THRESHOLD:2000000}
+ fileReportRecovery:
+ enabled: ${ACQ_BATCH_FILE_REPORT_RECOVERY_ENABLED:true}
+ directoryPath: ${ACQ_BATCH_FILE_REPORT_PATH:resources/reports}
+ fileNamePrefix: ${ACQ_BATCH_FILE_REPORTS_PREFIX:report-tae}
rest-client:
user-agent:
prefix: BatchService
- version: 1.4.1
+ version: 1.5.0
hpan:
serviceCode: hpan-service
base-url: ${HPAN_SERVICE_URL:https://bpd-dev.azure-api.net:${HPAN_SERVICE_PORT:443}}
@@ -161,6 +165,8 @@ rest-client:
url: /ade/{id}
received:
url: /rtd/file-register/ack-received/{id}
+ file-report:
+ url: /rtd/file-reporter/file-report
feign:
client:
diff --git a/core/pom.xml b/core/pom.xml
index 0e081707..17450a5e 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -4,12 +4,12 @@
rtd-ms-transaction-filter
it.gov.pagopa.rtd.ms
- 1.4.1
+ 1.5.0
it.gov.pagopa.rtd.ms.transaction_filter
rtd-ms-transaction-filter-core
- 1.4.1
+ 1.5.0
diff --git a/entrypoint.sh b/entrypoint.sh
index 8857f469..2e93b431 100644
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -8,5 +8,6 @@ mkdir -p $ACQ_BATCH_OUTPUT_PATH
mkdir -p $ACQ_BATCH_TRX_LOGS_PATH
mkdir -p $ACQ_BATCH_HPAN_INPUT_PATH
mkdir -p $ACQ_BATCH_SENDER_ADEACK_OUTPUT_PATH
+mkdir -p $ACQ_BATCH_FILE_REPORT_PATH
java -jar $APP_BIN
diff --git a/integration/jpa/pom.xml b/integration/jpa/pom.xml
index 48bf9cf7..b6853cdb 100644
--- a/integration/jpa/pom.xml
+++ b/integration/jpa/pom.xml
@@ -4,12 +4,12 @@
rtd-ms-transaction-filter-integration
it.gov.pagopa.rtd.ms.transaction_filter
- 1.4.1
+ 1.5.0
it.gov.pagopa.rtd.ms.transaction_filter.integration
rtd-ms-transaction-filter-integration-jpa
- 1.4.1
+ 1.5.0
diff --git a/integration/pom.xml b/integration/pom.xml
index 19e41e98..62b4f2e5 100644
--- a/integration/pom.xml
+++ b/integration/pom.xml
@@ -4,12 +4,12 @@
rtd-ms-transaction-filter
it.gov.pagopa.rtd.ms
- 1.4.1
+ 1.5.0
it.gov.pagopa.rtd.ms.transaction_filter
rtd-ms-transaction-filter-integration
- 1.4.1
+ 1.5.0
pom
diff --git a/integration/rest/pom.xml b/integration/rest/pom.xml
index a0823b90..11f1720a 100644
--- a/integration/rest/pom.xml
+++ b/integration/rest/pom.xml
@@ -4,12 +4,12 @@
it.gov.pagopa.rtd.ms.transaction_filter
rtd-ms-transaction-filter-integration
- 1.4.1
+ 1.5.0
it.gov.pagopa.rtd.ms.transaction_filter.integration
rtd-ms-transaction-filter-integration-rest
- 1.4.1
+ 1.5.0
@@ -37,6 +37,10 @@
com.fasterxml.jackson.core
jackson-databind
+
+ com.fasterxml.jackson.datatype
+ jackson-datatype-jsr310
+
diff --git a/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/FileReportRestClient.java b/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/FileReportRestClient.java
new file mode 100644
index 00000000..908d58ef
--- /dev/null
+++ b/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/FileReportRestClient.java
@@ -0,0 +1,8 @@
+package it.gov.pagopa.rtd.transaction_filter.connector;
+
+import it.gov.pagopa.rtd.transaction_filter.connector.model.FileReport;
+
+public interface FileReportRestClient {
+
+ FileReport getFileReport();
+}
diff --git a/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/FileReportRestClientImpl.java b/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/FileReportRestClientImpl.java
new file mode 100644
index 00000000..6ed0ce49
--- /dev/null
+++ b/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/FileReportRestClientImpl.java
@@ -0,0 +1,48 @@
+package it.gov.pagopa.rtd.transaction_filter.connector;
+
+import it.gov.pagopa.rtd.transaction_filter.connector.model.FileMetadata;
+import it.gov.pagopa.rtd.transaction_filter.connector.model.FileReport;
+import it.gov.pagopa.rtd.transaction_filter.validator.BasicResponseEntityValidator;
+import java.util.Objects;
+import java.util.Set;
+import javax.validation.ConstraintViolation;
+import javax.validation.ConstraintViolationException;
+import javax.validation.Validator;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Service;
+
+@Service
+@Slf4j
+@RequiredArgsConstructor
+class FileReportRestClientImpl implements FileReportRestClient {
+
+ @Value("${rest-client.hpan.api.key}")
+ private String apiKey;
+ private final HpanRestConnector hpanRestConnector;
+ private final BasicResponseEntityValidator reportValidator;
+ private final Validator fileValidator;
+
+ @Override
+ public FileReport getFileReport() {
+ ResponseEntity fileReportResponse = hpanRestConnector.getFileReport(apiKey);
+
+ reportValidator.validate(fileReportResponse);
+ for (FileMetadata file : Objects.requireNonNull(fileReportResponse.getBody()).getFilesRecentlyUploaded()) {
+ validateFileMetadata(file);
+ }
+
+ return fileReportResponse.getBody();
+ }
+
+ private void validateFileMetadata(FileMetadata fileToValidate) {
+ Set> violations = fileValidator.validate(fileToValidate);
+
+ if (!violations.isEmpty()) {
+ log.error("Validation errors in FileReport.");
+ throw new ConstraintViolationException(violations);
+ }
+ }
+}
diff --git a/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/HpanRestClientImpl.java b/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/HpanRestClientImpl.java
index 80363ae7..e005fbf6 100644
--- a/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/HpanRestClientImpl.java
+++ b/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/HpanRestClientImpl.java
@@ -283,11 +283,11 @@ public Void uploadFile(File fileToUpload, String sas, String authorizedContainer
httpput.setEntity(entity);
final HttpResponse response = httpclient.execute(httpput);
if (response.getStatusLine().getStatusCode() != HttpStatus.SC_CREATED) {
- throw new IOException("Upload failed for file " + fileToUpload.getName() + " (status was: "
- + response.getStatusLine().getStatusCode() + ")");
+ handleErrorStatus(response.getStatusLine().getStatusCode(), fileToUpload.getName());
+ } else {
+ log.info("File {} uploaded with success (status was: {})", fileToUpload.getName(),
+ response.getStatusLine().getStatusCode());
}
- log.info("File " + fileToUpload.getName() + " uploaded with success (status was: "
- + response.getStatusLine().getStatusCode() + ")");
return null;
}
@@ -300,6 +300,16 @@ private HttpHost createProxy() {
}
}
+ private void handleErrorStatus(int statusCode, String filename) throws IOException {
+ if (statusCode == HttpStatus.SC_CONFLICT) {
+ log.error("Upload failed for file {} (status was {}: File with same name has already been uploaded)",
+ filename, HttpStatus.SC_CONFLICT);
+ } else {
+ throw new IOException("Upload failed for file " + filename + " (status was: "
+ + statusCode + ")");
+ }
+ }
+
public void setValidationDate(LocalDateTime now) {
this.validationDate = now;
}
diff --git a/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/HpanRestConnector.java b/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/HpanRestConnector.java
index b263fcab..4754eae5 100644
--- a/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/HpanRestConnector.java
+++ b/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/HpanRestConnector.java
@@ -1,6 +1,7 @@
package it.gov.pagopa.rtd.transaction_filter.connector;
import feign.Param;
+import it.gov.pagopa.rtd.transaction_filter.connector.model.FileReport;
import java.util.Map;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.core.io.Resource;
@@ -44,4 +45,7 @@ public interface HpanRestConnector {
// @Param placeholder is there only to force Feign to add a 'Content-length' header to the request
ResponseEntity putAckReceived(@RequestHeader("Ocp-Apim-Subscription-Key") String token, @PathVariable(name = "id") String fileName,
@Param("placeholder") String placeholder);
+
+ @GetMapping(value = "${rest-client.file-report.url}")
+ ResponseEntity getFileReport(@RequestHeader("Ocp-Apim-Subscription-Key") String token);
}
diff --git a/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/model/FileMetadata.java b/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/model/FileMetadata.java
new file mode 100644
index 00000000..f1ddee00
--- /dev/null
+++ b/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/model/FileMetadata.java
@@ -0,0 +1,29 @@
+package it.gov.pagopa.rtd.transaction_filter.connector.model;
+
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.datatype.jsr310.deser.LocalDateTimeDeserializer;
+import java.time.LocalDateTime;
+import javax.validation.constraints.NotBlank;
+import javax.validation.constraints.NotNull;
+import lombok.Data;
+import org.springframework.format.annotation.DateTimeFormat;
+
+@Data
+public class FileMetadata {
+
+ @NotNull
+ @NotBlank
+ private String name;
+
+ private Long size;
+
+ @NotNull
+ @NotBlank
+ private String status;
+
+ @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
+ @JsonDeserialize(using = LocalDateTimeDeserializer.class)
+ @NotNull
+ private LocalDateTime transmissionDate;
+
+}
diff --git a/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/model/FileReport.java b/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/model/FileReport.java
new file mode 100644
index 00000000..31ea79da
--- /dev/null
+++ b/integration/rest/src/main/java/it/gov/pagopa/rtd/transaction_filter/connector/model/FileReport.java
@@ -0,0 +1,10 @@
+package it.gov.pagopa.rtd.transaction_filter.connector.model;
+
+import java.util.List;
+import lombok.Data;
+
+@Data
+public class FileReport {
+
+ List filesRecentlyUploaded;
+}
diff --git a/integration/rest/src/main/resources/config/rest-client.properties b/integration/rest/src/main/resources/config/rest-client.properties
index 8978289d..d11a5f9c 100644
--- a/integration/rest/src/main/resources/config/rest-client.properties
+++ b/integration/rest/src/main/resources/config/rest-client.properties
@@ -35,6 +35,7 @@ rest-client.hpan.abi-to-fiscalcode-map.url=/rtd/abi-to-fiscalcode/conversion-map
rest-client.sender-ade-ack.list.url=/rtd/file-register/sender-ade-ack
rest-client.sender-ade-ack.download-file.url=/ade/{id}
rest-client.sender-ade-ack.received.url=/rtd/file-register/ack-received/{id}
+rest-client.file-report.url=/rtd/file-reporter/file-report
feign.client.config.hpan-service.connectTimeout=${REST_CLIENT_CONNECT_TIMEOUT:${HPAN_REST_CLIENT_CONNECT_TIMEOUT:5000}}
feign.client.config.hpan-service.readTimeout=${REST_CLIENT_READ_TIMEOUT:${HPAN_REST_CLIENT_READ_TIMEOUT:5000}}
feign.client.config.hpan-service.loggerLevel=${REST_CLIENT_LOGGER_LEVEL:${HPAN_REST_CLIENT_LOGGER_LEVEL:FULL}}
diff --git a/integration/rest/src/test/java/it/gov/pagopa/rtd/transaction_filter/connector/FileReportRestClientTest.java b/integration/rest/src/test/java/it/gov/pagopa/rtd/transaction_filter/connector/FileReportRestClientTest.java
new file mode 100644
index 00000000..d65f1ec6
--- /dev/null
+++ b/integration/rest/src/test/java/it/gov/pagopa/rtd/transaction_filter/connector/FileReportRestClientTest.java
@@ -0,0 +1,176 @@
+package it.gov.pagopa.rtd.transaction_filter.connector;
+
+import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo;
+import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.github.tomakehurst.wiremock.extension.responsetemplating.ResponseTemplateTransformer;
+import com.github.tomakehurst.wiremock.junit.WireMockClassRule;
+import feign.FeignException;
+import it.gov.pagopa.rtd.transaction_filter.connector.config.HpanRestConnectorConfig;
+import it.gov.pagopa.rtd.transaction_filter.connector.model.FileMetadata;
+import it.gov.pagopa.rtd.transaction_filter.connector.model.FileReport;
+import it.gov.pagopa.rtd.transaction_filter.validator.BasicResponseEntityValidator;
+import it.gov.pagopa.rtd.transaction_filter.validator.ValidatorConfig;
+import java.time.LocalDateTime;
+import java.util.Collections;
+import java.util.List;
+import javax.validation.ValidationException;
+import lombok.SneakyThrows;
+import org.assertj.core.util.Lists;
+import org.junit.After;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.http.HttpMessageConvertersAutoConfiguration;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.cloud.openfeign.FeignAutoConfiguration;
+import org.springframework.context.ApplicationContextInitializer;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
+import org.springframework.test.context.junit4.SpringRunner;
+import org.springframework.test.context.support.TestPropertySourceUtils;
+
+@RunWith(SpringRunner.class)
+@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE)
+@TestPropertySource(
+ locations = "classpath:config/rest-client.properties",
+ properties = {
+ "rest-client.hpan.list.url=/rtd/payment-instrument-manager/hashed-pans",
+ "rest-client.hpan.salt.url=/rtd/payment-instrument-manager/salt",
+ "rest-client.hpan.adesas.url=/rtd/csv-transaction/ade/sas",
+ "rest-client.hpan.rtdsas.url=/rtd/csv-transaction/rtd/sas",
+ "rest-client.hpan.mtls.enabled=true", "rest-client.hpan.list.checksumHeaderName=checksum",
+ "rest-client.hpan.dateValidation.enabled=true",
+ "rest-client.hpan.list.dateValidationHeaderName=date",
+ "rest-client.hpan.key-store.file=classpath:certs/client-keystore.jks",
+ "rest-client.hpan.key-store.password=secret",
+ "rest-client.hpan.trust-store.file=classpath:certs/client-truststore.jks",
+ "rest-client.hpan.trust-store.password=secret",
+ "spring.application.name=rtd-ms-transaction-filter-integration-rest"})
+@ContextConfiguration(initializers = FileReportRestClientTest.RandomPortInitializer.class, classes = {
+ HpanRestConnectorConfig.class, FileReportRestClientImpl.class,
+ BasicResponseEntityValidator.class, ValidatorConfig.class, HpanRestConnector.class,
+ FeignAutoConfiguration.class, HttpMessageConvertersAutoConfiguration.class})
+public class FileReportRestClientTest {
+
+ @Autowired
+ private FileReportRestClientImpl restClient;
+
+ ObjectMapper mapper = new ObjectMapper();
+
+ @ClassRule
+ public static WireMockClassRule wireMockRule = new WireMockClassRule(wireMockConfig()
+ .dynamicHttpsPort()
+ .dynamicPort()
+ .needClientAuth(true)
+ .keystorePath("src/test/resources/certs/server-keystore.jks")
+ .keystorePassword("secret")
+ .keyManagerPassword("secret")
+ .trustStorePath("src/test/resources/certs/server-truststore.jks")
+ .trustStorePassword("secret")
+ .usingFilesUnderClasspath("stubs")
+ .extensions(new ResponseTemplateTransformer(true))
+ );
+
+ @After
+ public void cleanup() {
+ wireMockRule.resetToDefaultMappings();
+ }
+
+ @SneakyThrows
+ @Test
+ public void whenGetFileReportThenFileNameAndContentMatch() {
+ FileReport fileReport = restClient.getFileReport();
+
+ assertThat(fileReport).isNotNull().extracting(FileReport::getFilesRecentlyUploaded).asList().hasSize(2);
+ assertThat(fileReport.getFilesRecentlyUploaded()).containsAll(getDefaultReport());
+ }
+
+ @SneakyThrows
+ @Test
+ public void whenGetFileReportWithEmptyBodyThenReportIsEmpty() {
+ wireMockRule.stubFor(
+ get(urlPathEqualTo("/rtd/file-reporter/file-report"))
+ .willReturn(
+ aResponse().withBody(mapper.writeValueAsString(getEmptyFileReport()))
+ .withHeader("Content-type", "application/json")));
+
+ FileReport fileReport = restClient.getFileReport();
+
+ assertThat(fileReport).isNotNull().extracting(FileReport::getFilesRecentlyUploaded).asList().isEmpty();
+ }
+
+ @SneakyThrows
+ @Test
+ public void givenStatus404WhenGetFileReportThenThrowException() {
+ wireMockRule.stubFor(
+ get(urlPathEqualTo("/rtd/file-reporter/file-report"))
+ .willReturn(
+ aResponse().withStatus(404)
+ .withBody(mapper.writeValueAsString(getEmptyFileReport()))
+ .withHeader("Content-type", "application/json")));
+
+ assertThatThrownBy(() -> restClient.getFileReport()).isInstanceOf(FeignException.class);
+ }
+
+ @SneakyThrows
+ @Test
+ public void givenMalformedBodyWhenGetFileReportThenThrowException() {
+ wireMockRule.stubFor(
+ get(urlPathEqualTo("/rtd/file-reporter/file-report"))
+ .willReturn(
+ aResponse().withBody(mapper.writeValueAsString(getMalformedReport()))
+ .withHeader("Content-type", "application/json")));
+
+ assertThatThrownBy(() -> restClient.getFileReport()).isInstanceOf(
+ ValidationException.class);
+ }
+
+ private List getDefaultReport() {
+ FileMetadata file1 = new FileMetadata();
+ file1.setName("ADE.file1.pgp");
+ file1.setSize(200L);
+ file1.setStatus("SUCCESS");
+ file1.setTransmissionDate(LocalDateTime.of(2022, 10, 30, 10, 0, 0, 123000000));
+ FileMetadata file2 = new FileMetadata();
+ file2.setName("ADE.file2.pgp");
+ file2.setSize(500L);
+ file2.setStatus("SUCCESS");
+ file2.setTransmissionDate(LocalDateTime.of(2022, 10, 31, 10, 0, 0, 123000000));
+ return Lists.list(file1, file2);
+ }
+
+ private FileReport getEmptyFileReport() {
+ FileReport fileReport = new FileReport();
+ fileReport.setFilesRecentlyUploaded(Collections.emptyList());
+ return fileReport;
+ }
+
+ private FileReport getMalformedReport() {
+ FileReport fileReport = new FileReport();
+ FileMetadata file = new FileMetadata();
+ // missing mandatory fields like filename
+ file.setSize(200L);
+ fileReport.setFilesRecentlyUploaded(Collections.singletonList(file));
+ return fileReport;
+ }
+
+ public static class RandomPortInitializer implements
+ ApplicationContextInitializer {
+
+ @SneakyThrows
+ @Override
+ public void initialize(ConfigurableApplicationContext applicationContext) {
+ TestPropertySourceUtils.addInlinedPropertiesToEnvironment(applicationContext,
+ String.format("rest-client.hpan.base-url=https://localhost:%d/",
+ wireMockRule.httpsPort()));
+ }
+ }
+}
\ No newline at end of file
diff --git a/integration/rest/src/test/java/it/gov/pagopa/rtd/transaction_filter/connector/HpanRestClientTest.java b/integration/rest/src/test/java/it/gov/pagopa/rtd/transaction_filter/connector/HpanRestClientTest.java
index 48b81bec..0e6080e3 100644
--- a/integration/rest/src/test/java/it/gov/pagopa/rtd/transaction_filter/connector/HpanRestClientTest.java
+++ b/integration/rest/src/test/java/it/gov/pagopa/rtd/transaction_filter/connector/HpanRestClientTest.java
@@ -1,7 +1,18 @@
package it.gov.pagopa.rtd.transaction_filter.connector;
+import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
+import static com.github.tomakehurst.wiremock.client.WireMock.put;
+import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo;
+import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import static org.assertj.core.api.Assertions.assertThat;
+
import com.github.tomakehurst.wiremock.junit.WireMockClassRule;
import it.gov.pagopa.rtd.transaction_filter.connector.config.HpanRestConnectorConfig;
+import java.io.File;
+import java.io.IOException;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Objects;
import lombok.SneakyThrows;
import org.junit.Assert;
import org.junit.ClassRule;
@@ -13,6 +24,7 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.http.HttpMessageConvertersAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.system.OutputCaptureRule;
import org.springframework.cloud.openfeign.FeignAutoConfiguration;
import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ConfigurableApplicationContext;
@@ -21,15 +33,6 @@
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.support.TestPropertySourceUtils;
-import java.io.File;
-import java.io.IOException;
-import java.time.LocalDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.Objects;
-
-import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
-
-
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE)
@TestPropertySource(
@@ -71,6 +74,9 @@ public class HpanRestClientTest {
public TemporaryFolder tempFolder = new TemporaryFolder(
new File(Objects.requireNonNull(getClass().getResource("/")).getFile()));
+ @Rule
+ public OutputCaptureRule output = new OutputCaptureRule();
+
@ClassRule
public static WireMockClassRule wireMockRule = new WireMockClassRule(wireMockConfig()
.dynamicHttpsPort()
@@ -157,6 +163,18 @@ public void uploadFileRaisesExceptionWhenSignatureDoesntMatch() throws IOExcepti
hpanRestClient.uploadFile(fileToUpload, "sas-token", "not-authorized-container");
}
+ @Test
+ public void whenUploadFileReturns409ThenLogErrorAndDoNotRaiseException() throws IOException {
+ wireMockRule.stubFor(put(urlPathEqualTo("/pagopastorage/authorized-container/testFile"))
+ .willReturn(aResponse()
+ .withStatus(409)));
+
+ File fileToUpload = tempFolder.newFile("testFile");
+ hpanRestClient.uploadFile(fileToUpload, "sas", "authorized-container");
+
+ assertThat(output).contains("Upload failed", "status was 409");
+ }
+
public static class RandomPortInitializer implements ApplicationContextInitializer {
@SneakyThrows
@Override
diff --git a/integration/rest/src/test/java/it/gov/pagopa/rtd/transaction_filter/logger/MaskingPatternLayoutTest.java b/integration/rest/src/test/java/it/gov/pagopa/rtd/transaction_filter/logger/MaskingPatternLayoutTest.java
index f6c42d5f..767f39d8 100644
--- a/integration/rest/src/test/java/it/gov/pagopa/rtd/transaction_filter/logger/MaskingPatternLayoutTest.java
+++ b/integration/rest/src/test/java/it/gov/pagopa/rtd/transaction_filter/logger/MaskingPatternLayoutTest.java
@@ -38,7 +38,7 @@ void whenLogContainsSaltThenAnonymizeIt(CapturedOutput output) {
String stringWithSalt =
"[HpanRestConnector#getSalt] ---> GET https://api.dev.cstar.pagopa.it/rtd/payment-instrument-manager/v2/salt HTTP/1.1\n"
+ "[HpanRestConnector#getSalt] Ocp-Apim-Subscription-Key: ciao\n"
- + "[HpanRestConnector#getSalt] User-Agent: BatchService/1.4.1\n"
+ + "[HpanRestConnector#getSalt] User-Agent: BatchService/1.5.0\n"
+ "[HpanRestConnector#getSalt] ---> END HTTP (0-byte body)\n"
+ "[HpanRestConnector#getSalt] <--- HTTP/1.1 200 OK (57ms)\n"
+ "[HpanRestConnector#getSalt] connection: keep-alive\n"
diff --git a/integration/rest/src/test/resources/stubs/mappings/fileReport.json b/integration/rest/src/test/resources/stubs/mappings/fileReport.json
new file mode 100644
index 00000000..3b373cc9
--- /dev/null
+++ b/integration/rest/src/test/resources/stubs/mappings/fileReport.json
@@ -0,0 +1,28 @@
+{
+ "request": {
+ "method": "GET",
+ "urlPath": "/rtd/file-reporter/file-report"
+ },
+ "response": {
+ "status": "200",
+ "jsonBody": {
+ "filesRecentlyUploaded": [
+ {
+ "name": "ADE.file1.pgp",
+ "transmissionDate": "2022-10-30T10:00:00.123Z",
+ "size": 200,
+ "status": "SUCCESS"
+ },
+ {
+ "name": "ADE.file2.pgp",
+ "transmissionDate": "2022-10-31T10:00:00.123Z",
+ "size": 500,
+ "status": "SUCCESS"
+ }
+ ]
+ },
+ "headers": {
+ "Content-Type": "application/json"
+ }
+ }
+}
diff --git a/ops_resources/example_config/application.yml b/ops_resources/example_config/application.yml
index 4e973198..231e7010 100644
--- a/ops_resources/example_config/application.yml
+++ b/ops_resources/example_config/application.yml
@@ -105,11 +105,15 @@ batchConfiguration:
directoryPath: ${ACQ_BATCH_SENDER_ADEACK_OUTPUT_PATH:ade-errors}
transactionWriterAde:
splitThreshold: ${ACQ_BATCH_WRITER_ADE_SPLIT_THRESHOLD:2000000}
+ fileReportRecovery:
+ enabled: ${ACQ_BATCH_FILE_REPORT_RECOVERY_ENABLED:true}
+ directoryPath: ${ACQ_BATCH_FILE_REPORT_PATH:resources/reports}
+ fileNamePrefix: ${ACQ_BATCH_FILE_REPORTS_PREFIX:report-tae}
rest-client:
user-agent:
prefix: BatchService
- version: 1.4.1
+ version: 1.5.0
hpan:
serviceCode: hpan-service
base-url: ${HPAN_SERVICE_URL:https://bpd-dev.azure-api.net:${HPAN_SERVICE_PORT:443}}
@@ -160,6 +164,8 @@ rest-client:
url: /ade/{id}
received:
url: /rtd/file-register/ack-received/{id}
+ file-report:
+ url: /rtd/file-reporter/file-report
feign:
client:
diff --git a/ops_resources/example_config/application_hbsql.yml b/ops_resources/example_config/application_hbsql.yml
index 19de8130..e3006ab4 100644
--- a/ops_resources/example_config/application_hbsql.yml
+++ b/ops_resources/example_config/application_hbsql.yml
@@ -83,11 +83,15 @@ batchConfiguration:
directoryPath: ${ACQ_BATCH_SENDER_ADEACK_OUTPUT_PATH:ade-errors}
transactionWriterAde:
splitThreshold: ${ACQ_BATCH_WRITER_ADE_SPLIT_THRESHOLD:2000000}
+ fileReportRecovery:
+ enabled: ${ACQ_BATCH_FILE_REPORT_RECOVERY_ENABLED:true}
+ directoryPath: ${ACQ_BATCH_FILE_REPORT_PATH:resources/reports}
+ fileNamePrefix: ${ACQ_BATCH_FILE_REPORTS_PREFIX:report-tae}
rest-client:
user-agent:
prefix: BatchService
- version: 1.4.1
+ version: 1.5.0
hpan:
serviceCode: hpan-service
base-url: ${HPAN_SERVICE_URL:https://bpd-dev.azure-api.net:${HPAN_SERVICE_PORT:443}}
@@ -138,6 +142,8 @@ rest-client:
url: /ade/{id}
received:
url: /rtd/file-register/ack-received/{id}
+ file-report:
+ url: /rtd/file-reporter/file-report
feign:
client:
diff --git a/pom.xml b/pom.xml
index bcb1d209..3464dcd6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -10,7 +10,7 @@
it.gov.pagopa.rtd.ms
rtd-ms-transaction-filter
- 1.4.1
+ 1.5.0
pom
@@ -33,7 +33,7 @@
UTF-8
**/enums/**,**/model/**,**/Constants*.java,**/*Config.java,**/*Application.java
- 1.4.1
+ 1.5.0