[compat] [controller] add a field largestUsedRTVersionNumber in store config #4561
VeniceCI-StaticAnalysisAndUnitTests.yml
on: pull_request
Matrix: Clients / UT & CodeCov
Matrix: Controller / UT & CodeCov
Matrix: Integrations / UT & CodeCov
Matrix: Internal / UT & CodeCov
Matrix: Router / UT & CodeCov
Matrix: Server / UT & CodeCov
Matrix: StaticAnalysis
ValidateGradleWrapper (8s)
StaticAnalysisAndUnitTestsCompletionCheck (0s)
Annotations
16 errors and 37 warnings
InstanceSizeEstimatorTest.testInstanceMeasurement:
internal/venice-common/src/test/java/com/linkedin/venice/memory/InstanceSizeEstimatorTest.java#L46
java.lang.AssertionError: Memory allocated is negative! memoryAllocatedBeforeInstantiations: 2629302728; memoryAllocatedAfterInstantiations: 2609120552; memoryAllocatedByInstantiations: -20182176; 3 attempts left.
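The assertion above measures the bytes allocated by the test's instantiations as a before/after delta and retries a few times ("3 attempts left"). A minimal, hypothetical sketch of why such a delta can go negative, assuming a heap-usage-based measurement (this is not Venice's actual InstanceSizeEstimator code): a GC running between the two samples frees more memory than the instantiations allocate. Per-thread allocation counters are monotonic and would not show this effect.

```java
import java.lang.management.ManagementFactory;

// Hypothetical illustration only, not the Venice test code: a heap-usage delta
// can go negative if a GC runs between the two samples, matching the flaky
// "Memory allocated is negative!" assertion above.
public class AllocationDeltaSketch {
  public static void main(String[] args) {
    Runtime rt = Runtime.getRuntime();
    long before = rt.totalMemory() - rt.freeMemory(); // heap usage before instantiations
    byte[][] instances = new byte[100][];
    for (int i = 0; i < instances.length; i++) {
      instances[i] = new byte[1024];                  // allocate some objects
    }
    long after = rt.totalMemory() - rt.freeMemory();  // heap usage after instantiations
    long delta = after - before;                      // negative if GC freed more than was allocated
    System.out.println("memoryAllocatedByInstantiations: " + delta);

    // A monotonic alternative (HotSpot-specific cast): per-thread allocation
    // counters only ever increase, so this value cannot go negative.
    com.sun.management.ThreadMXBean tmx =
        (com.sun.management.ThreadMXBean) ManagementFactory.getThreadMXBean();
    System.out.println("thread allocated bytes so far: "
        + tmx.getThreadAllocatedBytes(Thread.currentThread().getId()));
  }
}
```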
|
SITWithPWiseAndBufferAfterLeaderTest.testNotifier[0](AA_ON):
clients/da-vinci-client/src/test/java/com/linkedin/davinci/kafka/consumer/SITWithPWiseAndBufferAfterLeaderTest.java#L1
org.mockito.exceptions.verification.ArgumentsAreDifferent: Argument(s) are different! Wanted:
storageMetadataService.put(
"TestTopic_803b22aa56_e63dbd54_v1",
2,
OffsetRecord{localVersionTopicOffset=3, upstreamOffset=-1, leaderTopic=null, offsetLag=0, eventTimeEpochMs=-1, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=true, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTaskTest.lambda$testNotifier$35(StoreIngestionTaskTest.java:1623)
Actual invocations have different arguments:
storageMetadataService.getLastOffset(
"TestTopic_803b22aa56_e63dbd54_v1",
1
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.getLastOffset(DeepCopyOffsetManager.java:49)
storageMetadataService.getLastOffset(
"TestTopic_803b22aa56_e63dbd54_v1",
2
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.getLastOffset(DeepCopyOffsetManager.java:49)
storageMetadataService.computeStoreVersionState(
"TestTopic_803b22aa56_e63dbd54_v1",
com.linkedin.venice.offsets.DeepCopyStorageMetadataService$$Lambda$693/343099047@3368c107
);
-> at com.linkedin.venice.offsets.DeepCopyStorageMetadataService.computeStoreVersionState(DeepCopyStorageMetadataService.java:34)
storageMetadataService.put(
"TestTopic_803b22aa56_e63dbd54_v1",
1,
OffsetRecord{localVersionTopicOffset=1, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118155314, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=false, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.put(DeepCopyOffsetManager.java:38)
storageMetadataService.computeStoreVersionState(
"TestTopic_803b22aa56_e63dbd54_v1",
com.linkedin.venice.offsets.DeepCopyStorageMetadataService$$Lambda$693/343099047@2955caef
);
-> at com.linkedin.venice.offsets.DeepCopyStorageMetadataService.computeStoreVersionState(DeepCopyStorageMetadataService.java:34)
storageMetadataService.put(
"TestTopic_803b22aa56_e63dbd54_v1",
1,
OffsetRecord{localVersionTopicOffset=2, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118155314, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=false, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.put(DeepCopyOffsetManager.java:38)
storageMetadataService.put(
"TestTopic_803b22aa56_e63dbd54_v1",
2,
OffsetRecord{localVersionTopicOffset=1, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118155314, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=false, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.put(DeepCopyOffsetManager.java:38)
storageMetadataService.computeStoreVersionState(
"TestTopic_803b22aa56_e63dbd54_v1",
com.linkedin.venice.offsets.DeepCopyStorageMetadataService$$Lambda$693/343099047@717b0aad
);
-> at com.linkedin.venice.offsets.DeepCopyStorageMetadataService.computeStoreVersionState(DeepCopyStorageMetadataService.java:34)
storageMetadataService.put(
"TestTopic_803b22aa56_e63dbd54_v1",
2,
OffsetRecord{localVersionTopicOffset=2, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118155314, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=false, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.put(DeepCopyOffsetManager.java:38)
storageMetadataService.put(
"TestTopic_803b22aa56_e63dbd54_v1",
1,
OffsetRecord{localVersionTopicOffset=3, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118155314, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=true, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.li
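For context on the Mockito error above: verify(...) with a concrete expected argument compares it against the recorded put() invocations via equals(), so a mismatch in any OffsetRecord field (here offsetLag and eventTimeEpochMs) or in the partition number fails with ArgumentsAreDifferent and prints the wanted vs. actual invocations shown in the log. A self-contained, hypothetical sketch of that verification pattern (the real check lives in StoreIngestionTaskTest.lambda$testNotifier$35 at line 1623; the interface below is a stand-in, not the actual StorageMetadataService API):

```java
import static org.mockito.Mockito.verify;

// Hypothetical sketch of the verification pattern behind the failure above.
class NotifierVerificationSketch {
  // Stand-in for the storageMetadataService mock in the log (assumed shape).
  interface MetadataStore {
    void put(String versionTopic, int partition, Object offsetRecord);
  }

  void verifyEndOfPushPersisted(MetadataStore storageMetadataService, Object expectedOffsetRecord) {
    // Fails with "Argument(s) are different!" if put() was never called with
    // exactly this topic, partition and an OffsetRecord equal to the expected one.
    verify(storageMetadataService).put("TestTopic_803b22aa56_e63dbd54_v1", 2, expectedOffsetRecord);
  }
}
```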
|
SITWithTWiseAndBufferAfterLeaderTest.testRecordLevelMetricForCurrentVersion[0](false):
clients/da-vinci-client/src/test/java/com/linkedin/davinci/kafka/consumer/SITWithTWiseAndBufferAfterLeaderTest.java#L1
Wanted but not invoked:
hostLevelIngestionStats.recordTotalBytesConsumed(
<any long>
);
-> at com.linkedin.davinci.stats.HostLevelIngestionStats.recordTotalBytesConsumed(HostLevelIngestionStats.java:499)
However, there were exactly 33 interactions with this mock:
hostLevelIngestionStats.recordProcessConsumerActionLatency(
5.0d
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.processConsumerActions(StoreIngestionTask.java:1969)
hostLevelIngestionStats.recordCheckLongRunningTasksLatency(
0.004067d
);
-> at com.linkedin.davinci.kafka.consumer.LeaderFollowerStoreIngestionTask.checkLongRunningTaskState(LeaderFollowerStoreIngestionTask.java:761)
hostLevelIngestionStats.recordStorageQuotaUsed(
NaNd
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.recordQuotaMetrics(StoreIngestionTask.java:1635)
hostLevelIngestionStats.recordProcessConsumerActionLatency(
0.0d
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.processConsumerActions(StoreIngestionTask.java:1969)
hostLevelIngestionStats.recordCheckLongRunningTasksLatency(
0.001833d
);
-> at com.linkedin.davinci.kafka.consumer.LeaderFollowerStoreIngestionTask.checkLongRunningTaskState(LeaderFollowerStoreIngestionTask.java:761)
hostLevelIngestionStats.recordStorageQuotaUsed(
NaNd
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.recordQuotaMetrics(StoreIngestionTask.java:1635)
hostLevelIngestionStats.recordConsumerRecordsQueuePutLatency(
0.127055d,
1740118082766L
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.produceToStoreBufferServiceOrKafka(StoreIngestionTask.java:1302)
hostLevelIngestionStats.recordStorageQuotaUsed(
NaNd
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.produceToStoreBufferServiceOrKafka(StoreIngestionTask.java:1307)
hostLevelIngestionStats.recordConsumerRecordsQueuePutLatency(
0.081041d,
1740118082767L
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.produceToStoreBufferServiceOrKafka(StoreIngestionTask.java:1302)
hostLevelIngestionStats.recordStorageQuotaUsed(
NaNd
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.produceToStoreBufferServiceOrKafka(StoreIngestionTask.java:1307)
hostLevelIngestionStats.recordProcessConsumerActionLatency(
0.0d
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.processConsumerActions(StoreIngestionTask.java:1969)
hostLevelIngestionStats.recordCheckLongRunningTasksLatency(
0.003145d
);
-> at com.linkedin.davinci.kafka.consumer.LeaderFollowerStoreIngestionTask.checkLongRunningTaskState(LeaderFollowerStoreIngestionTask.java:761)
hostLevelIngestionStats.recordStorageQuotaUsed(
NaNd
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.recordQuotaMetrics(StoreIngestionTask.java:1635)
hostLevelIngestionStats.recordTotalRecordsConsumed();
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.processConsumerRecord(StoreIngestionTask.java:2625)
hostLevelIngestionStats.recordProcessConsumerActionLatency(
0.0d
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.processConsumerActions(StoreIngestionTask.java:1969)
hostLevelIngestionStats.recordCheckLongRunningTasksLatency(
0.002605d
);
-> at com.linkedin.davinci.kafka.consumer.LeaderFollowerStoreIngestionTask.checkLongRunningTaskState(LeaderFollowerStoreIngestionTask.java:761)
hostLevelIngestionStats.recordStorageQuotaUsed(
NaNd
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.recordQuotaMetrics(StoreIngestionTask.java:1635)
hostLevelIngestionStats.recordConsumerRecordsQueuePutLatency(
0.050885d,
1740118082778L
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.produceToStoreBufferServiceOrKafka(StoreIngestionTask.java:1302)
hostLevelIngestionStats.recordStorageQuotaUsed(
NaNd
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTask.produceToStoreBufferServiceOrKafka(StoreIngestionTask.java:1307)
hostLevelIngestionStats.recordTotalRecordsConsumed();
-> at com.linkedin.davinci.kafka.consumer.Stor
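"Wanted but not invoked" means the mocked stats object never received the expected call at all: the 33 recorded interactions above cover latency, quota and record-count metrics, but no recordTotalBytesConsumed(long). A hypothetical sketch of the expectation that produces this message (the interface is a stand-in for the hostLevelIngestionStats mock, not the real HostLevelIngestionStats class):

```java
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.verify;

// Hypothetical sketch of the failing expectation, not the actual test code.
class RecordLevelMetricVerificationSketch {
  // Stand-in for the hostLevelIngestionStats mock in the log (assumed shape).
  interface IngestionStats {
    void recordTotalBytesConsumed(long bytes);
  }

  void verifyBytesConsumedRecorded(IngestionStats hostLevelIngestionStats) {
    // Passes only if recordTotalBytesConsumed was called with some long value;
    // otherwise Mockito reports "Wanted but not invoked" and lists the other
    // interactions it did record, as seen above.
    verify(hostLevelIngestionStats).recordTotalBytesConsumed(anyLong());
  }
}
```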
|
NativeMetadataRepositoryTest.testNativeMetadataRepositoryStats:
clients/da-vinci-client/src/test/java/com/linkedin/davinci/repository/NativeMetadataRepositoryTest.java#L140
java.lang.AssertionError: expected [2000.0] but found [1000.0]
|
SITWithTWiseAndBufferAfterLeaderTest.testNotifier[1](AA_OFF):
clients/da-vinci-client/src/test/java/com/linkedin/davinci/kafka/consumer/SITWithTWiseAndBufferAfterLeaderTest.java#L1
org.mockito.exceptions.verification.ArgumentsAreDifferent: Argument(s) are different! Wanted:
storageMetadataService.put(
"TestTopic_7e1e6a6d52_7b6be372_v1",
1,
OffsetRecord{localVersionTopicOffset=3, upstreamOffset=-1, leaderTopic=null, offsetLag=0, eventTimeEpochMs=-1, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=true, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTaskTest.lambda$testNotifier$35(StoreIngestionTaskTest.java:1621)
Actual invocations have different arguments:
storageMetadataService.getLastOffset(
"TestTopic_7e1e6a6d52_7b6be372_v1",
1
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.getLastOffset(DeepCopyOffsetManager.java:49)
storageMetadataService.getLastOffset(
"TestTopic_7e1e6a6d52_7b6be372_v1",
2
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.getLastOffset(DeepCopyOffsetManager.java:49)
storageMetadataService.computeStoreVersionState(
"TestTopic_7e1e6a6d52_7b6be372_v1",
com.linkedin.venice.offsets.DeepCopyStorageMetadataService$$Lambda$1070/0x00007ff99078f798@117edae2
);
-> at com.linkedin.venice.offsets.DeepCopyStorageMetadataService.computeStoreVersionState(DeepCopyStorageMetadataService.java:34)
storageMetadataService.put(
"TestTopic_7e1e6a6d52_7b6be372_v1",
2,
OffsetRecord{localVersionTopicOffset=1, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118081878, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=false, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.put(DeepCopyOffsetManager.java:38)
storageMetadataService.computeStoreVersionState(
"TestTopic_7e1e6a6d52_7b6be372_v1",
com.linkedin.venice.offsets.DeepCopyStorageMetadataService$$Lambda$1070/0x00007ff99078f798@1d552580
);
-> at com.linkedin.venice.offsets.DeepCopyStorageMetadataService.computeStoreVersionState(DeepCopyStorageMetadataService.java:34)
storageMetadataService.put(
"TestTopic_7e1e6a6d52_7b6be372_v1",
1,
OffsetRecord{localVersionTopicOffset=1, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118081878, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=false, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.put(DeepCopyOffsetManager.java:38)
storageMetadataService.put(
"TestTopic_7e1e6a6d52_7b6be372_v1",
2,
OffsetRecord{localVersionTopicOffset=2, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118081878, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=false, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.put(DeepCopyOffsetManager.java:38)
storageMetadataService.computeStoreVersionState(
"TestTopic_7e1e6a6d52_7b6be372_v1",
com.linkedin.venice.offsets.DeepCopyStorageMetadataService$$Lambda$1070/0x00007ff99078f798@4dbd3f
);
-> at com.linkedin.venice.offsets.DeepCopyStorageMetadataService.computeStoreVersionState(DeepCopyStorageMetadataService.java:34)
storageMetadataService.put(
"TestTopic_7e1e6a6d52_7b6be372_v1",
2,
OffsetRecord{localVersionTopicOffset=3, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118081878, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=true, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.put(DeepCopyOffsetManager.java:38)
storageMetadataService.put(
"TestTopic_7e1e6a6d52_7b6be372_v1",
1,
OffsetRecord{localVersionTopicOffset=2, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118081878, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=false, databaseInfo={}, realTimeProducerState={}, recordTransformerCla
|
SITWithTWiseAndBufferAfterLeaderTest.testNotifier[1](AA_OFF):
clients/da-vinci-client/src/test/java/com/linkedin/davinci/kafka/consumer/SITWithTWiseAndBufferAfterLeaderTest.java#L1
org.mockito.exceptions.verification.ArgumentsAreDifferent: Argument(s) are different! Wanted:
storageMetadataService.put(
"TestTopic_9e34135f65_1a4f643d_v1",
2,
OffsetRecord{localVersionTopicOffset=3, upstreamOffset=-1, leaderTopic=null, offsetLag=0, eventTimeEpochMs=-1, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=true, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.davinci.kafka.consumer.StoreIngestionTaskTest.lambda$testNotifier$35(StoreIngestionTaskTest.java:1623)
Actual invocations have different arguments:
storageMetadataService.getLastOffset(
"TestTopic_9e34135f65_1a4f643d_v1",
1
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.getLastOffset(DeepCopyOffsetManager.java:49)
storageMetadataService.getLastOffset(
"TestTopic_9e34135f65_1a4f643d_v1",
2
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.getLastOffset(DeepCopyOffsetManager.java:49)
storageMetadataService.computeStoreVersionState(
"TestTopic_9e34135f65_1a4f643d_v1",
com.linkedin.venice.offsets.DeepCopyStorageMetadataService$$Lambda$578/0x00007f8a305e48b0@702e553b
);
-> at com.linkedin.venice.offsets.DeepCopyStorageMetadataService.computeStoreVersionState(DeepCopyStorageMetadataService.java:34)
storageMetadataService.put(
"TestTopic_9e34135f65_1a4f643d_v1",
1,
OffsetRecord{localVersionTopicOffset=1, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118219685, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=false, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.put(DeepCopyOffsetManager.java:38)
storageMetadataService.put(
"TestTopic_9e34135f65_1a4f643d_v1",
1,
OffsetRecord{localVersionTopicOffset=2, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118219686, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=false, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.put(DeepCopyOffsetManager.java:38)
storageMetadataService.computeStoreVersionState(
"TestTopic_9e34135f65_1a4f643d_v1",
com.linkedin.venice.offsets.DeepCopyStorageMetadataService$$Lambda$578/0x00007f8a305e48b0@45a844de
);
-> at com.linkedin.venice.offsets.DeepCopyStorageMetadataService.computeStoreVersionState(DeepCopyStorageMetadataService.java:34)
storageMetadataService.computeStoreVersionState(
"TestTopic_9e34135f65_1a4f643d_v1",
com.linkedin.venice.offsets.DeepCopyStorageMetadataService$$Lambda$578/0x00007f8a305e48b0@20d7c50d
);
-> at com.linkedin.venice.offsets.DeepCopyStorageMetadataService.computeStoreVersionState(DeepCopyStorageMetadataService.java:34)
storageMetadataService.put(
"TestTopic_9e34135f65_1a4f643d_v1",
1,
OffsetRecord{localVersionTopicOffset=3, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118219686, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=true, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.put(DeepCopyOffsetManager.java:38)
storageMetadataService.put(
"TestTopic_9e34135f65_1a4f643d_v1",
2,
OffsetRecord{localVersionTopicOffset=1, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118219685, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=false, databaseInfo={}, realTimeProducerState={}, recordTransformerClassHash=null}
);
-> at com.linkedin.venice.offsets.DeepCopyOffsetManager.put(DeepCopyOffsetManager.java:38)
storageMetadataService.put(
"TestTopic_9e34135f65_1a4f643d_v1",
2,
OffsetRecord{localVersionTopicOffset=2, upstreamOffset=-1, leaderTopic=null, offsetLag=9223372036854775807, eventTimeEpochMs=1740118219686, latestProducerProcessingTimeInMs=0, isEndOfPushReceived=false, databaseInfo={}, realTimeProducerState={}, recordTransformerClas
|
DispatchingAvroGenericStoreClientTest.testBatchGet:
clients/venice-client/src/test/java/com/linkedin/venice/fastclient/DispatchingAvroGenericStoreClientTest.java#L1
org.testng.internal.thread.ThreadTimeoutException: Method com.linkedin.venice.fastclient.DispatchingAvroGenericStoreClientTest.testBatchGet() didn't finish within the time-out 10000
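The ThreadTimeoutException above is TestNG's own timeout enforcement: when a test declares a timeOut, TestNG runs it on a separate thread and aborts it once the limit (10000 ms here) is exceeded, so a hung batch-get turns into a test failure rather than a stuck build. A hypothetical sketch, assuming the failing test uses the same attribute:

```java
import org.testng.annotations.Test;

// Hypothetical illustration of how the ThreadTimeoutException above arises.
public class TimeoutSketchTest {
  @Test(timeOut = 10_000) // milliseconds; assumed to mirror the failing test's limit
  public void testBatchGetSketch() throws InterruptedException {
    // Any work that blocks longer than 10s here fails with
    // "didn't finish within the time-out 10000", as in testBatchGet above.
    Thread.sleep(10);
  }
}
```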
|
DispatchingAvroGenericStoreClientTest.testBatchGetToUnreachableClient:
clients/venice-client/src/test/java/com/linkedin/venice/fastclient/DispatchingAvroGenericStoreClientTest.java#L1021
java.lang.AssertionError: Cannot invoke "com.linkedin.venice.fastclient.StatsAvroGenericStoreClient.batchGet(com.linkedin.venice.fastclient.BatchGetRequestContext, java.util.Set)" because "this.statsAvroGenericStoreClient" is null expected [true] but found [false]
|
Integrations / UT & CodeCov (8)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Integrations / UT & CodeCov (17)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Integrations / UT & CodeCov (17)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Integrations / UT & CodeCov (11)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Integrations / UT & CodeCov (11)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Controller / UT & CodeCov (8)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Controller / UT & CodeCov (8)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Router / UT & CodeCov (8)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Router / UT & CodeCov (8)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Controller / UT & CodeCov (11)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Controller / UT & CodeCov (11)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Router / UT & CodeCov (11)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Router / UT & CodeCov (11)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Controller / UT & CodeCov (17)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Controller / UT & CodeCov (17)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
StaticAnalysis (17)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
StaticAnalysis (17)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Router / UT & CodeCov (17)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Router / UT & CodeCov (17)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Internal / UT & CodeCov (17)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Internal / UT & CodeCov (17)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Internal / UT & CodeCov (11)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Internal / UT & CodeCov (11)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Internal / UT & CodeCov (8)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Internal / UT & CodeCov (8)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Server / UT & CodeCov (8)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Server / UT & CodeCov (8)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Server / UT & CodeCov (11)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Server / UT & CodeCov (11)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Server / UT & CodeCov (17)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Server / UT & CodeCov (17)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Clients / UT & CodeCov (8)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Clients / UT & CodeCov (8)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Clients / UT & CodeCov (11)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Clients / UT & CodeCov (11)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Clients / UT & CodeCov (17)
Cache not found for keys: setup-java-Linux-x64-gradle-45697ee449aafa149b966f7c6539bb74113f04f6dff30b294a33a925b3b6d068
|
Clients / UT & CodeCov (17)
Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists
|
Artifacts
Produced during runtime
Name | Size
---|---
StaticAnalysis | 665 KB
clients-jdk11 | 2.57 MB
clients-jdk17 | 2.62 MB
clients-jdk8 | 2.54 MB
controller-jdk11 | 1.62 MB
controller-jdk17 | 1.56 MB
controller-jdk8 | 1.61 MB
integrations-jdk11 | 542 KB
integrations-jdk17 | 550 KB
integrations-jdk8 | 532 KB
internal-jdk11 | 3.54 MB
internal-jdk17 | 3.55 MB
internal-jdk8 | 3.53 MB
router-jdk11 | 1.05 MB
router-jdk17 | 1.05 MB
router-jdk8 | 1.04 MB
server-jdk11 | 9.47 MB
server-jdk17 | 9.2 MB
server-jdk8 | 9.44 MB