diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java
index a552b13aa5f84..e2937abd8ae93 100644
--- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java
+++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java
@@ -9,7 +9,6 @@
 package org.opensearch.common.cache.stats;
 
 import org.opensearch.common.annotation.ExperimentalApi;
-import org.opensearch.core.common.io.stream.Writeable;
 
 /**
  * Interface for access to any cache stats. Allows accessing stats by dimension values.
@@ -18,7 +17,7 @@
  * @opensearch.experimental
  */
 @ExperimentalApi
-public interface CacheStats extends Writeable {// TODO: also extends ToXContentFragment (in API PR)
+public interface CacheStats { // TODO: also extends Writeable, ToXContentFragment (in API PR)
 
     // Method to get all 5 values at once
     CacheStatsCounterSnapshot getTotalStats();
diff --git a/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java
index 1cb604b80e530..3fc5d54b5dcbe 100644
--- a/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java
+++ b/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java
@@ -8,11 +8,6 @@
 
 package org.opensearch.common.cache.stats;
 
-import org.opensearch.core.common.io.stream.StreamInput;
-import org.opensearch.core.common.io.stream.StreamOutput;
-
-import java.io.IOException;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -35,72 +30,6 @@ public MultiDimensionCacheStats(MDCSDimensionNode statsRoot, List<String> dimensionNames) {
         this.dimensionNames = dimensionNames;
     }
 
-    public MultiDimensionCacheStats(StreamInput in) throws IOException {
-        // Because we write in preorder order, the parent of the next node we read will always be one of the ancestors
-        // of the last node we read. This allows us to avoid ambiguity if nodes have the same dimension value, without
-        // having to serialize the whole path to each node.
-        this.dimensionNames = List.of(in.readStringArray());
-        this.statsRoot = new MDCSDimensionNode("", true);
-        readAndAttachDimensionNodeRecursive(in, List.of(statsRoot));
-        // Finally, update sum-of-children stats for the root node
-        CacheStatsCounter totalStats = new CacheStatsCounter();
-        for (MDCSDimensionNode child : statsRoot.children.values()) {
-            totalStats.add(child.getStats());
-        }
-        statsRoot.setStats(totalStats.snapshot());
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        // Write each node in preorder order, along with its depth.
-        // Then, when rebuilding the tree from the stream, we can always find the correct parent to attach each node to.
-        out.writeStringArray(dimensionNames.toArray(new String[0]));
-        for (MDCSDimensionNode child : statsRoot.children.values()) {
-            writeDimensionNodeRecursive(out, child, 1);
-        }
-        out.writeBoolean(false); // Write false to signal there are no more nodes
-    }
-
-    private void writeDimensionNodeRecursive(StreamOutput out, MDCSDimensionNode node, int depth) throws IOException {
-        out.writeBoolean(true); // Signals there is a following node to deserialize
-        out.writeVInt(depth);
-        out.writeString(node.getDimensionValue());
-        node.getStats().writeTo(out);
-
-        if (!node.children.isEmpty()) {
-            // Not a leaf node
-            out.writeBoolean(true); // Write true to indicate we should re-create a map on deserialization
-            for (MDCSDimensionNode child : node.children.values()) {
-                writeDimensionNodeRecursive(out, child, depth + 1);
-            }
-        } else {
-            out.writeBoolean(false); // Write false to indicate we should not re-create a map on deserialization
-        }
-    }
-
-    /**
-     * Reads a serialized dimension node, attaches it to its appropriate place in the tree, and returns the list of
-     * ancestors of the newly attached node.
-     */
-    private void readAndAttachDimensionNodeRecursive(StreamInput in, List<MDCSDimensionNode> ancestorsOfLastRead)
-        throws IOException {
-        boolean hasNextNode = in.readBoolean();
-        if (hasNextNode) {
-            int depth = in.readVInt();
-            String nodeDimensionValue = in.readString();
-            CacheStatsCounterSnapshot stats = new CacheStatsCounterSnapshot(in);
-            boolean doRecreateMap = in.readBoolean();
-
-            MDCSDimensionNode result = new MDCSDimensionNode(nodeDimensionValue, doRecreateMap, stats);
-            MDCSDimensionNode parent = ancestorsOfLastRead.get(depth - 1);
-            parent.getChildren().put(nodeDimensionValue, result);
-            List<MDCSDimensionNode> ancestors = new ArrayList<>(ancestorsOfLastRead.subList(0, depth));
-            ancestors.add(result);
-            readAndAttachDimensionNodeRecursive(in, ancestors);
-        }
-        // If !hasNextNode, there are no more nodes, so we are done
-    }
-
     @Override
     public CacheStatsCounterSnapshot getTotalStats() {
         return statsRoot.getStats();
diff --git a/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java
index ca5f79eb46958..460398961d94f 100644
--- a/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java
+++ b/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java
@@ -8,38 +8,46 @@
 
 package org.opensearch.common.cache.stats;
 
-import org.opensearch.common.io.stream.BytesStreamOutput;
-import org.opensearch.core.common.bytes.BytesReference;
-import org.opensearch.core.common.io.stream.BytesStreamInput;
 import org.opensearch.test.OpenSearchTestCase;
 
-import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
 public class MultiDimensionCacheStatsTests extends OpenSearchTestCase {
-    public void testSerialization() throws Exception {
-        List<String> dimensionNames = List.of("dim1", "dim2", "dim3");
+
+    public void testGet() throws Exception {
+        List<String> dimensionNames = List.of("dim1", "dim2", "dim3", "dim4");
         StatsHolder statsHolder = new StatsHolder(dimensionNames);
         Map<String, List<String>> usedDimensionValues = StatsHolderTests.getUsedDimensionValues(statsHolder, 10);
-        StatsHolderTests.populateStats(statsHolder, usedDimensionValues, 100, 10);
+        Map<List<String>, CacheStatsCounter> expected = StatsHolderTests.populateStats(statsHolder, usedDimensionValues, 1000, 10);
         MultiDimensionCacheStats stats = (MultiDimensionCacheStats) statsHolder.getCacheStats();
 
-        BytesStreamOutput os = new BytesStreamOutput();
-        stats.writeTo(os);
-        BytesStreamInput is = new BytesStreamInput(BytesReference.toBytes(os.bytes()));
-        MultiDimensionCacheStats deserialized = new MultiDimensionCacheStats(is);
+        // test the value in the map is as expected for each distinct combination of values
+        for (List<String> dimensionValues : expected.keySet()) {
+            CacheStatsCounter expectedCounter = expected.get(dimensionValues);
+
+            CacheStatsCounterSnapshot actualStatsHolder = StatsHolderTests.getNode(dimensionValues, statsHolder.getStatsRoot())
+                .getStatsSnapshot();
+            CacheStatsCounterSnapshot actualCacheStats = getNode(dimensionValues, stats.getStatsRoot()).getStats();
+
+            assertEquals(expectedCounter.snapshot(), actualStatsHolder);
+            assertEquals(expectedCounter.snapshot(), actualCacheStats);
+        }
 
-        assertEquals(stats.dimensionNames, deserialized.dimensionNames);
-        List<List<String>> pathsInOriginal = new ArrayList<>();
-        getAllPathsInTree(stats.getStatsRoot(), new ArrayList<>(), pathsInOriginal);
-        for (List<String> path : pathsInOriginal) {
-            MultiDimensionCacheStats.MDCSDimensionNode originalNode = getNode(path, stats.statsRoot);
-            MultiDimensionCacheStats.MDCSDimensionNode deserializedNode = getNode(path, deserialized.statsRoot);
-            assertNotNull(deserializedNode);
-            assertEquals(originalNode.getDimensionValue(), deserializedNode.getDimensionValue());
-            assertEquals(originalNode.getStats(), deserializedNode.getStats());
+        // test gets for total (this also checks sum-of-children logic)
+        CacheStatsCounter expectedTotal = new CacheStatsCounter();
+        for (List<String> dims : expected.keySet()) {
+            expectedTotal.add(expected.get(dims));
         }
+        assertEquals(expectedTotal.snapshot(), stats.getTotalStats());
+
+        assertEquals(expectedTotal.getHits(), stats.getTotalHits());
+        assertEquals(expectedTotal.getMisses(), stats.getTotalMisses());
+        assertEquals(expectedTotal.getEvictions(), stats.getTotalEvictions());
+        assertEquals(expectedTotal.getSizeInBytes(), stats.getTotalSizeInBytes());
+        assertEquals(expectedTotal.getEntries(), stats.getTotalEntries());
+
+        assertSumOfChildrenStats(stats.getStatsRoot());
     }
 
     public void testEmptyDimsList() throws Exception {
@@ -54,22 +62,6 @@ public void testEmptyDimsList() throws Exception {
         assertEquals(stats.getTotalStats(), statsRoot.getStats());
     }
 
-    private void getAllPathsInTree(
-        MultiDimensionCacheStats.MDCSDimensionNode currentNode,
-        List<String> pathToCurrentNode,
-        List<List<String>> allPaths
-    ) {
-        allPaths.add(pathToCurrentNode);
-        if (currentNode.getChildren() != null && !currentNode.getChildren().isEmpty()) {
-            // not a leaf node
-            for (MultiDimensionCacheStats.MDCSDimensionNode child : currentNode.getChildren().values()) {
-                List<String> pathToChild = new ArrayList<>(pathToCurrentNode);
-                pathToChild.add(child.getDimensionValue());
-                getAllPathsInTree(child, pathToChild, allPaths);
-            }
-        }
-    }
-
     private MultiDimensionCacheStats.MDCSDimensionNode getNode(
         List<String> dimensionValues,
         MultiDimensionCacheStats.MDCSDimensionNode root
@@ -83,4 +75,17 @@ private MultiDimensionCacheStats.MDCSDimensionNode getNode(
         }
         return current;
     }
+
+    private void assertSumOfChildrenStats(MultiDimensionCacheStats.MDCSDimensionNode current) {
+        if (!current.children.isEmpty()) {
+            CacheStatsCounter expectedTotal = new CacheStatsCounter();
+            for (MultiDimensionCacheStats.MDCSDimensionNode child : current.children.values()) {
+                expectedTotal.add(child.getStats());
+            }
+            assertEquals(expectedTotal.snapshot(), current.getStats());
+            for (MultiDimensionCacheStats.MDCSDimensionNode child : current.children.values()) {
+                assertSumOfChildrenStats(child);
+            }
+        }
+    }
 }
diff --git a/server/src/test/java/org/opensearch/common/cache/stats/StatsHolderTests.java b/server/src/test/java/org/opensearch/common/cache/stats/StatsHolderTests.java
index 707fe2b926c0b..d351572e05d74 100644
--- a/server/src/test/java/org/opensearch/common/cache/stats/StatsHolderTests.java
+++ b/server/src/test/java/org/opensearch/common/cache/stats/StatsHolderTests.java
@@ -187,7 +187,6 @@ static Map<List<String>, CacheStatsCounter> populateStats(
         int numRepetitionsPerValue
     ) throws InterruptedException {
         Map<List<String>, CacheStatsCounter> expected = new ConcurrentHashMap<>();
-
         Thread[] threads = new Thread[numDistinctValuePairs];
         CountDownLatch countDownLatch = new CountDownLatch(numDistinctValuePairs);
         Random rand = Randomness.get();
@@ -196,42 +195,23 @@ static Map<List<String>, CacheStatsCounter> populateStats(
             dimensionsForThreads.add(getRandomDimList(statsHolder.getDimensionNames(), usedDimensionValues, true, rand));
             int finalI = i;
             threads[i] = new Thread(() -> {
-                Random threadRand = Randomness.get(); // TODO: This always has the same seed for each thread, causing only 1 set of values
+                Random threadRand = Randomness.get();
                 List<String> dimensions = dimensionsForThreads.get(finalI);
                 expected.computeIfAbsent(dimensions, (key) -> new CacheStatsCounter());
-
                 for (int j = 0; j < numRepetitionsPerValue; j++) {
-                    int numHitIncrements = threadRand.nextInt(10);
-                    for (int k = 0; k < numHitIncrements; k++) {
-                        statsHolder.incrementHits(dimensions);
-                        expected.get(dimensions).hits.inc();
-                    }
-                    int numMissIncrements = threadRand.nextInt(10);
-                    for (int k = 0; k < numMissIncrements; k++) {
-                        statsHolder.incrementMisses(dimensions);
-                        expected.get(dimensions).misses.inc();
-                    }
-                    int numEvictionIncrements = threadRand.nextInt(10);
-                    for (int k = 0; k < numEvictionIncrements; k++) {
-                        statsHolder.incrementEvictions(dimensions);
-                        expected.get(dimensions).evictions.inc();
-                    }
-                    int numMemorySizeIncrements = threadRand.nextInt(10);
-                    for (int k = 0; k < numMemorySizeIncrements; k++) {
-                        long memIncrementAmount = threadRand.nextInt(5000);
-                        statsHolder.incrementSizeInBytes(dimensions, memIncrementAmount);
-                        expected.get(dimensions).sizeInBytes.inc(memIncrementAmount);
-                    }
-                    int numEntryIncrements = threadRand.nextInt(9) + 1;
-                    for (int k = 0; k < numEntryIncrements; k++) {
-                        statsHolder.incrementEntries(dimensions);
-                        expected.get(dimensions).entries.inc();
-                    }
-                    int numEntryDecrements = threadRand.nextInt(numEntryIncrements);
-                    for (int k = 0; k < numEntryDecrements; k++) {
-                        statsHolder.decrementEntries(dimensions);
-                        expected.get(dimensions).entries.dec();
-                    }
+                    CacheStatsCounter statsToInc = new CacheStatsCounter(
+                        threadRand.nextInt(10),
+                        threadRand.nextInt(10),
+                        threadRand.nextInt(10),
+                        threadRand.nextInt(5000),
+                        threadRand.nextInt(10)
+                    );
+                    expected.get(dimensions).hits.inc(statsToInc.getHits());
+                    expected.get(dimensions).misses.inc(statsToInc.getMisses());
+                    expected.get(dimensions).evictions.inc(statsToInc.getEvictions());
+                    expected.get(dimensions).sizeInBytes.inc(statsToInc.getSizeInBytes());
+                    expected.get(dimensions).entries.inc(statsToInc.getEntries());
+                    StatsHolderTests.populateStatsHolderFromStatsValueMap(statsHolder, Map.of(dimensions, statsToInc));
                 }
                 countDownLatch.countDown();
             });
@@ -284,4 +264,24 @@ private void assertSumOfChildrenStats(DimensionNode current) {
             }
         }
     }
+
+    static void populateStatsHolderFromStatsValueMap(StatsHolder statsHolder, Map<List<String>, CacheStatsCounter> statsMap) {
+        for (Map.Entry<List<String>, CacheStatsCounter> entry : statsMap.entrySet()) {
+            CacheStatsCounter stats = entry.getValue();
+            List<String> dims = entry.getKey();
+            for (int i = 0; i < stats.getHits(); i++) {
+                statsHolder.incrementHits(dims);
+            }
+            for (int i = 0; i < stats.getMisses(); i++) {
+                statsHolder.incrementMisses(dims);
+            }
+            for (int i = 0; i < stats.getEvictions(); i++) {
+                statsHolder.incrementEvictions(dims);
+            }
+            statsHolder.incrementSizeInBytes(dims, stats.getSizeInBytes());
+            for (int i = 0; i < stats.getEntries(); i++) {
+                statsHolder.incrementEntries(dims);
+            }
+        }
+    }
 }
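
Note on the serialization scheme removed above (slated to return with Writeable in the API PR, per the TODO): writeTo() emits the stats tree in preorder, tagging each node with its depth, and the reader keeps the ancestor list of the last node read, so each incoming node's parent is simply ancestors[depth - 1]. Below is a minimal, self-contained sketch of that idea; the Node class and the DataOutput/DataInput plumbing are hypothetical stand-ins for MDCSDimensionNode and OpenSearch's StreamOutput/StreamInput, not the actual types.

import java.io.*;
import java.util.*;

public class PreorderTreeCodec {
    static class Node {
        final String value;
        final Map<String, Node> children = new LinkedHashMap<>();
        Node(String value) { this.value = value; }
    }

    // Write each node in preorder, tagged with its depth. A leading boolean
    // marks whether another node follows (mirroring the removed writeTo()).
    static void write(DataOutput out, Node node, int depth) throws IOException {
        out.writeBoolean(true);   // another node follows
        out.writeInt(depth);
        out.writeUTF(node.value);
        for (Node child : node.children.values()) {
            write(out, child, depth + 1);
        }
    }

    // Because nodes arrive in preorder, the parent of each node is always the
    // entry at index (depth - 1) in the ancestor list of the last-read node,
    // so full paths never need to be serialized.
    static Node read(DataInput in) throws IOException {
        Node root = new Node("");
        List<Node> ancestors = new ArrayList<>(List.of(root));
        while (in.readBoolean()) {
            int depth = in.readInt();
            Node node = new Node(in.readUTF());
            ancestors.get(depth - 1).children.put(node.value, node);
            ancestors = new ArrayList<>(ancestors.subList(0, depth));
            ancestors.add(node);
        }
        return root;
    }

    public static void main(String[] args) throws IOException {
        Node root = new Node("");
        Node a = new Node("indexA");
        a.children.put("shard0", new Node("shard0"));
        root.children.put("indexA", a);
        root.children.put("indexB", new Node("indexB"));

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        for (Node child : root.children.values()) write(out, child, 1);
        out.writeBoolean(false);  // signal there are no more nodes

        Node rebuilt = read(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(rebuilt.children.keySet());                         // [indexA, indexB]
        System.out.println(rebuilt.children.get("indexA").children.keySet());  // [shard0]
    }
}

As the removed comment notes, this stays unambiguous even when nodes at different branches share a dimension value, because the depth, not the value, selects the parent.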