diff --git a/arex-storage-config/pom.xml b/arex-storage-config/pom.xml
index 7c383661..baf658e0 100644
--- a/arex-storage-config/pom.xml
+++ b/arex-storage-config/pom.xml
@@ -45,7 +45,7 @@
<artifactId>arex-storage-service</artifactId>
<groupId>com.arextest</groupId>
- <version>1.0.44</version>
+ <version>1.0.45</version>
diff --git a/arex-storage-model/pom.xml b/arex-storage-model/pom.xml
index 20658cd2..2fee61af 100644
--- a/arex-storage-model/pom.xml
+++ b/arex-storage-model/pom.xml
@@ -7,7 +7,7 @@
<artifactId>arex-storage-service</artifactId>
<groupId>com.arextest</groupId>
- <version>1.0.44</version>
+ <version>1.0.45</version>
diff --git a/arex-storage-web-api/pom.xml b/arex-storage-web-api/pom.xml
index 30e1f047..4a62a9f2 100644
--- a/arex-storage-web-api/pom.xml
+++ b/arex-storage-web-api/pom.xml
@@ -123,7 +123,7 @@
<artifactId>arex-storage-service</artifactId>
<groupId>com.arextest</groupId>
- <version>1.0.44</version>
+ <version>1.0.45</version>
diff --git a/arex-storage-web-api/src/main/java/com/arextest/storage/mock/MockResultProvider.java b/arex-storage-web-api/src/main/java/com/arextest/storage/mock/MockResultProvider.java
index f59a313f..0e43cc79 100644
--- a/arex-storage-web-api/src/main/java/com/arextest/storage/mock/MockResultProvider.java
+++ b/arex-storage-web-api/src/main/java/com/arextest/storage/mock/MockResultProvider.java
@@ -3,7 +3,6 @@
import com.arextest.model.mock.MockCategoryType;
import com.arextest.model.mock.Mocker;
import java.util.List;
-import java.util.Map;
import javax.validation.constraints.NotNull;
/**
diff --git a/arex-storage-web-api/src/main/java/com/arextest/storage/mock/impl/DefaultMockResultProviderImpl.java b/arex-storage-web-api/src/main/java/com/arextest/storage/mock/impl/DefaultMockResultProviderImpl.java
index b1aa6f27..8d257893 100644
--- a/arex-storage-web-api/src/main/java/com/arextest/storage/mock/impl/DefaultMockResultProviderImpl.java
+++ b/arex-storage-web-api/src/main/java/com/arextest/storage/mock/impl/DefaultMockResultProviderImpl.java
@@ -73,7 +73,7 @@ final class DefaultMockResultProviderImpl implements MockResultProvider {
/**
* 1. Store recorded data and matching keys in redis 2. The mock type associated with dubbo, which
- * needs to record the maximum number of replays
+ * needs to record the maximum number of replays 3. Renew the cache entries
*/
@Override
public boolean putRecordResult(MockCategoryType category, String recordId,
@@ -86,18 +86,26 @@ public boolean putRecordResult(MockCategoryType category, Str
// Records the maximum number of operations corresponding to recorded data
List<T> mockList = new ArrayList<>();
+ // key: a Redis key whose occurrences need to be counted; value: the current count for that key
+ Map<byte[], Integer> mockSequenceKeyMaps = Maps.newHashMap();
// Obtain the number of the same interfaces in recorded data
while (valueIterator.hasNext()) {
T value = valueIterator.next();
mockList.add(value);
- if (shouldBuildRecordOperationKey(value) || shouldRecordCallReplayMax) {
- byte[] recordOperationKey = CacheKeyUtils.buildRecordOperationKey(category, recordId,
- getOperationNameWithCategory(value));
- nextSequence(recordOperationKey);
+ // Skip values that do not need to be grouped and counted by operation
+ if (!shouldBuildRecordOperationKey(value) && !shouldRecordCallReplayMax) {
+ continue;
}
+ byte[] recordOperationKey = CacheKeyUtils.buildRecordOperationKey(category, recordId,
+ getOperationNameWithCategory(value));
+ int count = updateMapsAndGetCount(mockSequenceKeyMaps, recordOperationKey);
+ LOGGER.info("update record operation cache, count: {}, operation: {}", count, getOperationNameWithCategory(value));
+ putRedisValue(recordOperationKey, count);
}
mockList.sort(Comparator.comparing(Mocker::getCreationTime));
- for (T value : mockList) {
+ int mockListSize = mockList.size();
+ for (int sequence = 1; sequence <= mockListSize; sequence++) {
+ T value = mockList.get(sequence - 1);
// Place the maximum number of playback times corresponding to the operations into the recorded data
if (shouldRecordCallReplayMax) {
byte[] recordOperationKey = CacheKeyUtils.buildRecordOperationKey(category, recordId,
@@ -108,28 +116,41 @@ public boolean putRecordResult(MockCategoryType category, Str
targetResponse.setAttribute(CALL_REPLAY_MAX, count);
}
}
- size = sequencePutRecordData(category, recordIdBytes, size, recordKey, value);
+ size = sequencePutRecordData(category, recordIdBytes, size, recordKey, value, sequence, mockSequenceKeyMaps);
}
+ LOGGER.info("update record cache, count: {}, recordId: {}, category: {}", mockListSize, recordId, category);
+ putRedisValue(recordKey, mockListSize);
LOGGER.info("put record result to cache size:{} for category:{},record id:{}", size, category,
recordId);
return size > EMPTY_SIZE;
}
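
The loop above replaces the earlier per-operation nextSequence calls with a local count map (mockSequenceKeyMaps) that is written out through putRedisValue, and each mocker is stored under an explicit 1-based sequence taken from the sorted list. A minimal sketch of that flow, using an in-memory map in place of Redis and an illustrative key layout (the real keys come from CacheKeyUtils):

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Simplified sketch of the revised putRecordResult flow; key names are illustrative only.
public class RecordCountSketch {

  // Stand-in for the Redis cache: key -> value bytes.
  private final Map<String, byte[]> cache = new HashMap<>();

  public int putRecordResult(String recordId, List<String> operationNames) {
    // 1. Count how many recorded calls each operation has (local map, no shared counter).
    Map<String, Integer> operationCounts = new HashMap<>();
    for (String operation : operationNames) {
      operationCounts.merge(operation, 1, Integer::sum);
    }
    // 2. Persist each per-operation count as UTF-8 text, like putRedisValue does.
    operationCounts.forEach((operation, count) -> cache.put(recordId + ":" + operation,
        String.valueOf(count).getBytes(StandardCharsets.UTF_8)));
    // 3. Store every call under an explicit 1-based sequence slot.
    int sequence = 0;
    for (String operation : operationNames) {
      sequence++;
      cache.put(recordId + ":seq:" + sequence, operation.getBytes(StandardCharsets.UTF_8));
    }
    // 4. Record the total number of calls for this record id.
    cache.put(recordId + ":total", String.valueOf(sequence).getBytes(StandardCharsets.UTF_8));
    return sequence;
  }

  public static void main(String[] args) {
    RecordCountSketch sketch = new RecordCountSketch();
    int total = sketch.putRecordResult("record-1", List.of("queryUser", "queryUser", "queryOrder"));
    System.out.println("stored calls: " + total); // stored calls: 3
  }
}

Because the counts and sequence slots are recomputed from scratch on every call, repeating the put for the same record rewrites the existing entries rather than appending to them, which fits the cache-renewal goal stated in the updated Javadoc.
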
+ private void putRedisValue(byte[] recordOperationKey, int count) {
+ redisCacheProvider.put(recordOperationKey, cacheExpiredSeconds, CacheKeyUtils.toUtf8Bytes(
+ String.valueOf(count)));
+ }
+
private int sequencePutRecordData(MockCategoryType category,
- byte[] recordIdBytes, int size, byte[] recordKey, T value) {
+ byte[] recordIdBytes, int size, byte[] recordKey, T value, int sequence,
+ Map<byte[], Integer> mockSequenceKeyMaps) {
if (useEigenMatch && MapUtils.isEmpty(value.getEigenMap())) {
calculateEigen(value);
}
List<byte[]> mockKeyList = matchKeyFactory.build(value);
final byte[] zstdValue = serializer.serialize(value);
- byte[] valueRefKey = sequencePut(recordKey, zstdValue);
+ byte[] valueRefKey = sequencePut(recordKey, zstdValue, sequence);
+ LOGGER.info("update record sequence cache, count: {}", sequence);
if (valueRefKey == null) {
return size;
}
for (int i = 0; i < mockKeyList.size(); i++) {
byte[] mockKeyBytes = mockKeyList.get(i);
byte[] key = CacheKeyUtils.buildRecordKey(category, recordIdBytes, mockKeyBytes);
- byte[] sequenceKey = sequencePut(key, valueRefKey);
+ int count = updateMapsAndGetCount(mockSequenceKeyMaps, key);
+ LOGGER.info("update record mock key cache, count: {}, mock index: {}, operation: {}",
+ count, i, value.getOperationName());
+ putRedisValue(key, count);
+ byte[] sequenceKey = sequencePut(key, valueRefKey, count);
if (sequenceKey != null) {
size++;
}
@@ -142,6 +163,36 @@ private int sequencePutRecordData(MockCategoryType category,
return size;
}
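
Within sequencePutRecordData, the serialized mocker is stored once under a sequence key derived from the record key, and every match key produced by matchKeyFactory stores a reference to that entry under its own running count from mockSequenceKeyMaps. A sketch of this fan-out with an illustrative key composition (the real keys are built by CacheKeyUtils and createSequenceKey):

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch of the mock-key indexing: the value is stored once, match keys store references.
public class MockKeyIndexSketch {

  private final Map<String, byte[]> cache = new HashMap<>();
  private final Map<String, Integer> mockKeyCounts = new HashMap<>();

  public void index(String recordKey, int sequence, byte[] serializedValue, List<String> mockKeys) {
    // The payload is stored once under recordKey + sequence (the "valueRefKey").
    String valueRefKey = recordKey + ":" + sequence;
    cache.put(valueRefKey, serializedValue);
    // Each match key keeps its own counter so the 1st, 2nd, ... identical request
    // resolves to the 1st, 2nd, ... stored value during replay.
    for (String mockKey : mockKeys) {
      int count = mockKeyCounts.merge(mockKey, 1, Integer::sum);
      cache.put(mockKey + ":" + count, valueRefKey.getBytes(StandardCharsets.UTF_8));
    }
  }

  public static void main(String[] args) {
    MockKeyIndexSketch sketch = new MockKeyIndexSketch();
    sketch.index("record-1", 1, "payload-a".getBytes(StandardCharsets.UTF_8), List.of("op:queryUser"));
    sketch.index("record-1", 2, "payload-b".getBytes(StandardCharsets.UTF_8), List.of("op:queryUser"));
    // op:queryUser:1 -> record-1:1, op:queryUser:2 -> record-1:2
    System.out.println(new String(sketch.cache.get("op:queryUser:2"), StandardCharsets.UTF_8));
  }
}
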
+ /**
+ * Increments the count stored in the map for the given key and returns the new count.
+ */
+ private int updateMapsAndGetCount(Map<byte[], Integer> maps, byte[] key) {
+ int count = 1;
+ byte[] mapKey = getKeyByTargetKey(maps, key);
+ if (mapKey == null) {
+ maps.put(key, count);
+ return count;
+ }
+ count = maps.get(mapKey) + 1;
+ maps.put(mapKey, count);
+ return count;
+ }
+
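
updateMapsAndGetCount keeps its counters in a HashMap keyed by byte[], which is why it goes through getKeyByTargetKey below rather than calling maps.get directly. If the keys were wrapped in a type with value-based equality, the same first-call-returns-1, then-increments contract could be written with Map.merge; a sketch of that alternative (not the project's code), assuming the wrapped key bytes are never mutated:

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

public class CountingSketch {

  // ByteBuffer.wrap gives content-based equals/hashCode as long as the wrapped
  // array is not mutated afterwards.
  private final Map<ByteBuffer, Integer> counts = new HashMap<>();

  // Returns 1 for the first occurrence of a key, then 2, 3, ...
  public int increment(byte[] key) {
    return counts.merge(ByteBuffer.wrap(key), 1, Integer::sum);
  }

  public static void main(String[] args) {
    CountingSketch sketch = new CountingSketch();
    byte[] a = {1, 2, 3};
    byte[] sameContent = {1, 2, 3};
    System.out.println(sketch.increment(a));           // 1
    System.out.println(sketch.increment(sameContent)); // 2 - equal content counts as the same key
  }
}
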
+ /**
+ * Finds the map key whose byte content equals the target key, or null if none matches.
+ */
+ private byte[] getKeyByTargetKey(Map<byte[], Integer> maps, byte[] targetKey) {
+ if (MapUtils.isEmpty(maps)) {
+ return null;
+ }
+ for (byte[] key : maps.keySet()) {
+ if (Arrays.equals(key, targetKey)) {
+ return key;
+ }
+ }
+ return null;
+ }
+
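
The linear Arrays.equals scan exists because byte[] inherits identity-based equals and hashCode from Object, so a HashMap<byte[], Integer> only finds a key when it is the very same array instance. A self-contained demonstration of that pitfall next to the content-based lookup this helper performs:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class ByteArrayKeyDemo {

  public static void main(String[] args) {
    Map<byte[], Integer> counts = new HashMap<>();
    byte[] original = "record-1".getBytes(StandardCharsets.UTF_8);
    byte[] sameContent = "record-1".getBytes(StandardCharsets.UTF_8);

    counts.put(original, 1);
    System.out.println(counts.get(sameContent));            // null - identity-based lookup misses
    System.out.println(findByContent(counts, sameContent)); // 1    - content comparison hits
  }

  // Mirrors the linear scan performed by getKeyByTargetKey.
  private static Integer findByContent(Map<byte[], Integer> counts, byte[] targetKey) {
    for (Map.Entry<byte[], Integer> entry : counts.entrySet()) {
      if (Arrays.equals(entry.getKey(), targetKey)) {
        return entry.getValue();
      }
    }
    return null;
  }
}
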
private boolean shouldUseIdOfInstanceToMockResult(MockCategoryType category) {
return !category.isEntryPoint();
}
@@ -267,12 +318,26 @@ private byte[] sequencePut(final byte[] key, final byte[] zstdValue) {
return sequenceKey;
}
} catch (Throwable throwable) {
- LOGGER.error("put error:{} sequence:{} for base64 key:{}",
+ LOGGER.error("redis put error:{} sequence:{} for base64 key:{}",
throwable.getMessage(), next, CompressionUtils.encodeToBase64String(key), throwable);
}
return null;
}
+ private byte[] sequencePut(final byte[] key, final byte[] zstdValue, int sequence) {
+ try {
+ final byte[] sequenceKey = createSequenceKey(key, sequence);
+ boolean retResult = redisCacheProvider.put(sequenceKey, cacheExpiredSeconds, zstdValue);
+ if (retResult) {
+ return sequenceKey;
+ }
+ } catch (Throwable throwable) {
+ LOGGER.error("redis put error::{} sequence:{} for base64 key:{}",
+ throwable.getMessage(), sequence, CompressionUtils.encodeToBase64String(key), throwable);
+ }
+ return null;
+ }
+
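
The new sequencePut overload writes to the slot supplied by the caller instead of asking the shared counter (nextSequence) for the next one. A sketch contrasting the two write paths, with an illustrative key composition rather than the real createSequenceKey; presumably the explicit slot lets a repeated preload overwrite the same entries instead of appending:

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

public class SequencePutSketch {

  private final Map<String, byte[]> cache = new HashMap<>();
  private final AtomicInteger counter = new AtomicInteger();

  // Original style: the slot is whatever the shared counter hands out next.
  public String putWithCounter(String key, byte[] value) {
    int next = counter.incrementAndGet();
    String sequenceKey = key + ":" + next;
    cache.put(sequenceKey, value);
    return sequenceKey;
  }

  // New style: the caller supplies the slot, so writing the same record again
  // lands on the same keys instead of appending new ones.
  public String putWithSequence(String key, byte[] value, int sequence) {
    String sequenceKey = key + ":" + sequence;
    cache.put(sequenceKey, value);
    return sequenceKey;
  }

  public static void main(String[] args) {
    SequencePutSketch sketch = new SequencePutSketch();
    byte[] payload = "payload".getBytes(StandardCharsets.UTF_8);
    System.out.println(sketch.putWithCounter("record-1", payload));     // record-1:1
    System.out.println(sketch.putWithSequence("record-1", payload, 1)); // record-1:1 (same slot, idempotent)
  }
}
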
/**
* sequence query for record result,if consume overhead the total,we use last one instead as
* return.
@@ -671,7 +736,8 @@ public byte[] getMockResultWithEigenMatch(MockCategoryType category,
}
boolean tryFindLastValue =
context.getMockStrategy() == MockResultMatchStrategy.TRY_FIND_LAST_VALUE;
-
+ LOGGER.info(
+ "[[title=eigenMatch]]get mock result with eigen match, recordDataCount: {}", count);
// 3.1 iterate over all records, calculating the eigen between replay requests and record requests.
// invocationMap: Map>>
Map>> invocationMap = Maps.newHashMap();
diff --git a/arex-storage-web-api/src/main/java/com/arextest/storage/service/PrepareMockResultService.java b/arex-storage-web-api/src/main/java/com/arextest/storage/service/PrepareMockResultService.java
index 149152f2..3475b7f9 100644
--- a/arex-storage-web-api/src/main/java/com/arextest/storage/service/PrepareMockResultService.java
+++ b/arex-storage-web-api/src/main/java/com/arextest/storage/service/PrepareMockResultService.java
@@ -52,9 +52,9 @@ private boolean preload(RepositoryProvider<? extends Mocker> repositoryReader,
if (repositoryReader == null) {
return false;
}
- if (mockResultProvider.recordResultCount(categoryType, recordId) > 0) {
- LOGGER.warn("skip preload cache for category:{},record id:{}", categoryType, recordId);
- return true;
+ int resultCount = mockResultProvider.recordResultCount(categoryType, recordId);
+ if (resultCount > 0) {
+ LOGGER.info("preload cache for category:{},record id:{},count:{}", categoryType, recordId, resultCount);
}
Iterable<? extends Mocker> iterable;
iterable = repositoryReader.queryRecordList(categoryType, recordId);
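
Previously a non-empty cache made preload return early with a warning; now the existing count is only logged and the record is loaded from the repository again, refreshing the cached data and its expiration. A simplified before/after sketch of that control flow (names and the reload stand-in are illustrative):

import java.util.HashMap;
import java.util.Map;

public class PreloadSketch {

  private final Map<String, Integer> cachedCounts = new HashMap<>();

  // Old behavior: a non-empty cache short-circuits the preload.
  boolean preloadBefore(String recordId) {
    if (cachedCounts.getOrDefault(recordId, 0) > 0) {
      return true; // skip; the cached entries keep their original expiration
    }
    return reloadFromRepository(recordId);
  }

  // New behavior: log the existing count, then reload anyway to renew the cache.
  boolean preloadAfter(String recordId) {
    int resultCount = cachedCounts.getOrDefault(recordId, 0);
    if (resultCount > 0) {
      System.out.println("cache already holds " + resultCount + " mockers for " + recordId + ", renewing");
    }
    return reloadFromRepository(recordId);
  }

  // Stand-in for queryRecordList + mockResultProvider.putRecordResult.
  private boolean reloadFromRepository(String recordId) {
    cachedCounts.put(recordId, 3);
    return true;
  }

  public static void main(String[] args) {
    PreloadSketch sketch = new PreloadSketch();
    sketch.preloadAfter("record-1");
    sketch.preloadAfter("record-1"); // second call renews the cache instead of skipping
  }
}
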
diff --git a/pom.xml b/pom.xml
index a3e27363..11929562 100644
--- a/pom.xml
+++ b/pom.xml
@@ -408,5 +408,5 @@
<url>https://github.com/arextest/arex-storage</url>
- <version>1.0.44</version>
+ <version>1.0.45</version>
\ No newline at end of file