From 641abf1287f40f81611652c446e1704b50edb5fe Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Thu, 8 Feb 2024 11:36:49 -0800 Subject: [PATCH 01/32] CacheStats interface and SingleDimensionCacheStats impl Signed-off-by: Peter Alfonsi --- .../cache/store/disk/EhcacheDiskCache.java | 35 ++- .../common/cache/stats/CacheStats.java | 26 +- .../cache/stats/CacheStatsDimension.java | 18 ++ .../stats/SingleDimensionCacheStats.java | 231 ++++++++++++++++++ .../cache/store/OpenSearchOnHeapCache.java | 9 +- .../cache/tier/TieredSpilloverCache.java | 10 +- .../stats/SingleDimensionCacheStatsTests.java | 171 +++++++++++++ 7 files changed, 469 insertions(+), 31 deletions(-) create mode 100644 server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java create mode 100644 server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java create mode 100644 server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 91635dda1f668..725d27fc479de 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -18,6 +18,7 @@ import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; +import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; @@ -98,7 +99,7 @@ public class EhcacheDiskCache implements StoreAwareCache { private final Class keyType; private final Class 
valueType; private final TimeValue expireAfterAccess; - private final DiskCacheStats stats = new DiskCacheStats(); + private final CacheStats stats; private final EhCacheEventListener ehCacheEventListener; private final String threadPoolAlias; private final Settings settings; @@ -141,6 +142,7 @@ private EhcacheDiskCache(Builder builder) { this.eventListener = builder.getEventListener(); this.ehCacheEventListener = new EhCacheEventListener(builder.getEventListener()); this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); + this.stats = new SingleDimensionCacheStats(builder.shardIdDimensionName); } private Cache buildCache(Duration expireAfterAccess, Builder builder) { @@ -377,7 +379,7 @@ public Iterable keys() { */ @Override public long count() { - return stats.count(); + return stats.getTotalEntries(); } @Override @@ -414,17 +416,6 @@ public CacheStoreType getTierType() { return CacheStoreType.DISK; } - /** - * Stats related to disk cache. - */ - static class DiskCacheStats implements CacheStats { - private final CounterMetric count = new CounterMetric(); - - @Override - public long count() { - return count.count(); - } - } /** * This iterator wraps ehCache iterator and only iterates over its keys. @@ -466,8 +457,8 @@ class EhCacheEventListener implements CacheEventListener { } @Override - public void onEvent(CacheEvent event) { - switch (event.getType()) { + public void onEvent(CacheEvent event) { } + /*switch (event.getType()) { case CREATED: stats.count.inc(); this.eventListener.onCached(event.getKey(), event.getNewValue(), CacheStoreType.DISK); @@ -514,13 +505,13 @@ public void onEvent(CacheEvent event) { default: break; } - } + }*/ } /** * Factory to create an ehcache disk cache. */ - public static class EhcacheDiskCacheFactory implements StoreAwareCache.Factory { + public class EhcacheDiskCacheFactory implements StoreAwareCache.Factory { /** * Ehcache disk cache name. 
@@ -539,7 +530,7 @@ public StoreAwareCache create(StoreAwareCacheConfig config, C Setting stringSetting = DISK_STORAGE_PATH_SETTING.getConcreteSettingForNamespace( CacheType.INDICES_REQUEST_CACHE.getSettingPrefix() ); - return new Builder().setStoragePath((String) settingList.get(DISK_STORAGE_PATH_KEY).get(settings)) + return new EhcacheDiskCache.Builder().setStoragePath((String) settingList.get(DISK_STORAGE_PATH_KEY).get(settings)) .setDiskCacheAlias((String) settingList.get(DISK_CACHE_ALIAS_KEY).get(settings)) .setCacheType(cacheType) .setKeyType((config.getKeyType())) @@ -562,7 +553,7 @@ public String getCacheName() { * @param Type of key * @param Type of value */ - public static class Builder extends StoreAwareCacheBuilder { + public class Builder extends StoreAwareCacheBuilder { private CacheType cacheType; private String storagePath; @@ -577,6 +568,7 @@ public static class Builder extends StoreAwareCacheBuilder { private Class keyType; private Class valueType; + private String shardIdDimensionName; /** * Default constructor. Added to fix javadocs. @@ -653,6 +645,11 @@ public Builder setIsEventListenerModeSync(boolean isEventListenerModeSync) return this; } + public Builder setShardIdDimensionName(String dimensionName) { + this.shardIdDimensionName = dimensionName; + return this; + } + @Override public EhcacheDiskCache build() { return new EhcacheDiskCache<>(this); diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java index cf84f296916fb..820c530df462a 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java @@ -8,11 +8,31 @@ package org.opensearch.common.cache.stats; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.ToXContentFragment; + +import java.util.List; + /** * Interface for any cache specific stats. 
* TODO: Add rest of stats like hits/misses. */ -public interface CacheStats { - // Provides the current number of entries in cache. - long count(); +public interface CacheStats extends Writeable { // TODO: Make this extend ToXContentFragment too + long getTotalHits(); + long getTotalMisses(); + long getTotalEvictions(); + long getTotalMemorySize(); + long getTotalEntries(); + long getHitsByDimension(CacheStatsDimension dimension); + long getMissesByDimension(CacheStatsDimension dimension); + long getEvictionsByDimension(CacheStatsDimension dimension); + long getMemorySizeByDimension(CacheStatsDimension dimension); + long getEntriesByDimension(CacheStatsDimension dimension); + + void incrementHitsByDimensions(List dimensions); + void incrementMissesByDimensions(List dimensions); + void incrementEvictionsByDimensions(List dimensions); + void incrementMemorySizeByDimensions(List dimensions, long amountBytes); + void incrementEntriesByDimensions(List dimensions); + } diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java new file mode 100644 index 0000000000000..83e022325e4bb --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java @@ -0,0 +1,18 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.stats; + +public class CacheStatsDimension { + public final String dimensionName; + public final String dimensionValue; + public CacheStatsDimension(String dimensionName, String dimensionValue) { + this.dimensionName = dimensionName; + this.dimensionValue = dimensionValue; + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java new file mode 100644 index 0000000000000..2597d5c678242 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java @@ -0,0 +1,231 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.stats; + +import org.opensearch.common.metrics.CounterMetric; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * A CacheStats implementation for caches that aggregate over a single dimension. + * For example, caches in the IndicesRequestCache only aggregate over ShardId value. 
+ */ +public class SingleDimensionCacheStats implements CacheStats { + // Maintain a counter metric for each shard id (dimension values) + private final Map hitsMap; + private final Map missesMap; + private final Map evictionsMap; + private final Map memorySizeMap; + private final Map entriesMap; + + // Also maintain a single total counter metric, to avoid having to sum over many values for shards + private final CounterMetric totalHits; + private final CounterMetric totalMisses; + private final CounterMetric totalEvictions; + private final CounterMetric totalMemorySize; + private final CounterMetric totalEntries; + + // The allowed dimension name. This stats only allows a single dimension name + private final String allowedDimensionName; + + public SingleDimensionCacheStats(String allowedDimensionName) { + this.hitsMap = new HashMap<>(); + this.missesMap = new HashMap<>(); + this.evictionsMap = new HashMap<>(); + this.memorySizeMap = new HashMap<>(); + this.entriesMap = new HashMap<>(); + + this.totalHits = new CounterMetric(); + this.totalMisses = new CounterMetric(); + this.totalEvictions = new CounterMetric(); + this.totalMemorySize = new CounterMetric(); + this.totalEntries = new CounterMetric(); + + this.allowedDimensionName = allowedDimensionName; + } + + public SingleDimensionCacheStats(StreamInput in) throws IOException { + this.hitsMap = convertLongMapToCounterMetric(in.readMap(StreamInput::readString, StreamInput::readVLong)); + this.missesMap = convertLongMapToCounterMetric(in.readMap(StreamInput::readString, StreamInput::readVLong)); + this.evictionsMap = convertLongMapToCounterMetric(in.readMap(StreamInput::readString, StreamInput::readVLong)); + this.memorySizeMap = convertLongMapToCounterMetric(in.readMap(StreamInput::readString, StreamInput::readVLong)); + this.entriesMap = convertLongMapToCounterMetric(in.readMap(StreamInput::readString, StreamInput::readVLong)); + + this.totalHits = new CounterMetric(); + totalHits.inc(in.readVLong()); + 
this.totalMisses = new CounterMetric(); + totalMisses.inc(in.readVLong()); + this.totalEvictions = new CounterMetric(); + totalEvictions.inc(in.readVLong()); + this.totalMemorySize = new CounterMetric(); + totalMemorySize.inc(in.readVLong()); + this.totalEntries = new CounterMetric(); + totalEntries.inc(in.readVLong()); + + this.allowedDimensionName = in.readString(); + } + + @Override + public long getTotalHits() { + return this.totalHits.count(); + } + + @Override + public long getTotalMisses() { + return this.totalMisses.count(); + } + + @Override + public long getTotalEvictions() { + return this.totalEvictions.count(); + } + + @Override + public long getTotalMemorySize() { + return this.totalMemorySize.count(); + } + + @Override + public long getTotalEntries() { + return this.totalEntries.count(); + } + + private long internalGetByDimension(CacheStatsDimension dimension, Map metricsMap) { + CounterMetric counter = metricsMap.get(dimension.dimensionValue); + if (counter == null) { + return 0; + } + return counter.count(); + } + + @Override + public long getHitsByDimension(CacheStatsDimension dimension) { + return internalGetByDimension(dimension, hitsMap); + } + + @Override + public long getMissesByDimension(CacheStatsDimension dimension) { + return internalGetByDimension(dimension, missesMap); + } + + @Override + public long getEvictionsByDimension(CacheStatsDimension dimension) { + return internalGetByDimension(dimension, evictionsMap); + } + + @Override + public long getMemorySizeByDimension(CacheStatsDimension dimension) { + return internalGetByDimension(dimension, memorySizeMap); + } + + @Override + public long getEntriesByDimension(CacheStatsDimension dimension) { + return internalGetByDimension(dimension, entriesMap); + } + + private boolean checkDimensionList(List dimensions) { + return dimensions.size() == 1 && allowedDimensionName.equals(dimensions.get(0).dimensionName); + } + private void internalIncrement(List dimensions, Map metricMap, CounterMetric 
totalMetric, long incrementAmount) { + if (checkDimensionList(dimensions)) { + String dimensionValue = dimensions.get(0).dimensionValue; + totalMetric.inc(incrementAmount); + CounterMetric counter = metricMap.get(dimensionValue); + if (counter == null) { + counter = new CounterMetric(); + metricMap.put(dimensionValue, counter); + } + counter.inc(incrementAmount); + } + } + + @Override + public void incrementHitsByDimensions(List dimensions) { + internalIncrement(dimensions, hitsMap, totalHits, 1); + } + + @Override + public void incrementMissesByDimensions(List dimensions) { + internalIncrement(dimensions, missesMap, totalMisses, 1); + } + + @Override + public void incrementEvictionsByDimensions(List dimensions) { + internalIncrement(dimensions, evictionsMap, totalEvictions, 1); + } + + @Override + public void incrementMemorySizeByDimensions(List dimensions, long amountBytes) { + internalIncrement(dimensions, memorySizeMap, totalMemorySize, amountBytes); + } + + @Override + public void incrementEntriesByDimensions(List dimensions) { + internalIncrement(dimensions, entriesMap, totalEntries, 1); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeMap(convertCounterMapToLong(hitsMap), StreamOutput::writeString, StreamOutput::writeVLong); + out.writeMap(convertCounterMapToLong(missesMap), StreamOutput::writeString, StreamOutput::writeVLong); + out.writeMap(convertCounterMapToLong(evictionsMap), StreamOutput::writeString, StreamOutput::writeVLong); + out.writeMap(convertCounterMapToLong(memorySizeMap), StreamOutput::writeString, StreamOutput::writeVLong); + out.writeMap(convertCounterMapToLong(entriesMap), StreamOutput::writeString, StreamOutput::writeVLong); + + out.writeVLong(totalHits.count()); + out.writeVLong(totalMisses.count()); + out.writeVLong(totalEvictions.count()); + out.writeVLong(totalMemorySize.count()); + out.writeVLong(totalEntries.count()); + + out.writeString(allowedDimensionName); + } + + public String 
getAllowedDimensionName() { + return allowedDimensionName; + } + + // For converting to StreamOutput/StreamInput, write maps of longs rather than CounterMetrics which don't support writing + private Map convertCounterMapToLong(Map inputMap) { + Map result = new HashMap<>(); + for (String key : inputMap.keySet()) { + result.put(key, inputMap.get(key).count()); + } + return result; + } + + private Map convertLongMapToCounterMetric(Map inputMap) { + Map result = new HashMap<>(); + for (String key: inputMap.keySet()) { + CounterMetric counter = new CounterMetric(); + counter.inc(inputMap.get(key)); + result.put(key, counter); + } + return result; + } + + /*@Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return null; + }*/ + + /*static final class Fields { + static final String MEMORY_SIZE = "memory_size"; + static final String EVICTIONS = "evictions"; + static final String HIT_COUNT = "hit_count"; + static final String MISS_COUNT = "miss_count"; + static final String ENTRIES = "entries"; + }*/ +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index 5b9ff5921a01c..e93b70dc8b5e0 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -14,6 +14,7 @@ import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.cache.stats.CacheStats; +import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; @@ -31,7 +32,7 @@ public class OpenSearchOnHeapCache implements 
StoreAwareCache, Remov private final StoreAwareCacheEventListener eventListener; - private final CacheStats stats = new OpenSearchOnHeapCacheStats(); + private final CacheStats stats = new SingleDimensionCacheStats(""); public OpenSearchOnHeapCache(Builder builder) { CacheBuilder cacheBuilder = CacheBuilder.builder() @@ -91,7 +92,7 @@ public Iterable keys() { @Override public long count() { - return stats.count(); + return stats.getTotalEntries(); } @Override @@ -127,12 +128,12 @@ public void onRemoval(RemovalNotification notification) { /** * Stats for opensearch on heap cache. */ - class OpenSearchOnHeapCacheStats implements CacheStats { + /*class OpenSearchOnHeapCacheStats implements CacheStats { @Override public long count() { return cache.count(); } - } + }*/ /** * Builder object diff --git a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java index 027eef358c2fa..74d72f94576c7 100644 --- a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java +++ b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java @@ -47,7 +47,7 @@ public class TieredSpilloverCache implements ICache, StoreAwareCache private final Optional> onDiskCache; private final StoreAwareCache onHeapCache; private final StoreAwareCacheEventListener listener; - private final CacheStats stats = new TieredSpillOverCacheStats(); + //private final CacheStats stats = new TieredSpillOverCacheStats(); ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); ReleasableLock readLock = new ReleasableLock(readWriteLock.readLock()); ReleasableLock writeLock = new ReleasableLock(readWriteLock.writeLock()); @@ -165,7 +165,7 @@ public Iterable keys() { @Override public long count() { - return stats.count(); + return 0; } @Override @@ -186,7 +186,7 @@ public void close() throws IOException { @Override public CacheStats stats() { - return stats; + return 
null; } @Override @@ -248,7 +248,7 @@ private Function> getValueFromTieredCache(boolean tri /** * Stats for tiered spillover cache. */ - class TieredSpillOverCacheStats implements CacheStats { + /*class TieredSpillOverCacheStats implements CacheStats { @Override public long count() { @@ -258,7 +258,7 @@ public long count() { } return totalCount; } - } + }*/ /** * Builder object for tiered spillover cache. diff --git a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java new file mode 100644 index 0000000000000..057e7c9f66ce5 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java @@ -0,0 +1,171 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.stats; + +import org.opensearch.common.Randomness; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; + +public class SingleDimensionCacheStatsTests extends OpenSearchTestCase { + private final String dimensionName = "shardId"; + public void testAddAndGet() throws Exception { + StatsAndExpectedResults statsAndExpectedResults = getPopulatedStats(); + SingleDimensionCacheStats stats = statsAndExpectedResults.stats; + + checkShardResults(statsAndExpectedResults); + checkTotalResults(statsAndExpectedResults); + + // Check values returned for a nonexistent dimension value or name return 0 + assertEquals(0, stats.getHitsByDimension(new 
CacheStatsDimension(dimensionName, "nonexistent"))); + assertEquals(0, stats.getHitsByDimension(new CacheStatsDimension("nonexistentName", "nonexistentValue"))); + } + + public void testSerialization() throws Exception { + StatsAndExpectedResults statsAndExpectedResults = getPopulatedStats(); + SingleDimensionCacheStats stats = statsAndExpectedResults.stats; + Map> expectedResults = statsAndExpectedResults.expectedShardResults; + + BytesStreamOutput os = new BytesStreamOutput(); + stats.writeTo(os); + BytesStreamInput is = new BytesStreamInput(BytesReference.toBytes(os.bytes())); + SingleDimensionCacheStats deserialized = new SingleDimensionCacheStats(is); + + StatsAndExpectedResults deserializedStatsAndExpectedResults = new StatsAndExpectedResults(deserialized, expectedResults, statsAndExpectedResults.numShardIds); + checkShardResults(deserializedStatsAndExpectedResults); + checkTotalResults(deserializedStatsAndExpectedResults); + assertEquals(deserialized.getAllowedDimensionName(), stats.getAllowedDimensionName()); + } + + private CacheStatsDimension getDim(int i) { + return new CacheStatsDimension(dimensionName, String.valueOf(i)); + } + + private List getDimList(int i) { + ArrayList result = new ArrayList<>(); + result.add(getDim(i)); + return result; + } + + private long sumMap(Map inputMap) { + long result = 0; + for (String key : inputMap.keySet()) { + result += inputMap.get(key); + } + return result; + } + + private StatsAndExpectedResults getPopulatedStats() { + SingleDimensionCacheStats stats = new SingleDimensionCacheStats(dimensionName); + + int numShardIds = 10; + Map expectedHits = new HashMap<>(); + Map expectedMisses = new HashMap<>(); + Map expectedEvictions = new HashMap<>(); + Map expectedMemorySize = new HashMap<>(); + Map expectedEntries = new HashMap<>(); + + Random rand = Randomness.get(); + + // For each shard id value, increment metrics some random number of times (possibly 0) + for (int shardId = 0; shardId < numShardIds; shardId++) { + + 
String shardIdString = String.valueOf(shardId); + List dimensions = getDimList(shardId); + + for (Map map : new Map[]{expectedHits, expectedMisses, expectedEvictions, expectedMemorySize, expectedEntries}) { + map.put(shardIdString, 0L); + } + + int numHitIncrements = rand.nextInt(10); + for (int i = 0; i < numHitIncrements; i++) { + stats.incrementHitsByDimensions(dimensions); + expectedHits.put(shardIdString, expectedHits.get(shardIdString) + 1); + } + + int numMissIncrements = rand.nextInt(10); + for (int i = 0; i < numMissIncrements; i++) { + stats.incrementMissesByDimensions(dimensions); + expectedMisses.put(shardIdString, expectedMisses.get(shardIdString) + 1); + } + + int numEvictionIncrements = rand.nextInt(10); + for (int i = 0; i < numEvictionIncrements; i++) { + stats.incrementEvictionsByDimensions(dimensions); + expectedEvictions.put(shardIdString, expectedEvictions.get(shardIdString) + 1); + } + + int numMemorySizeIncrements = rand.nextInt(10); + for (int i = 0; i < numMemorySizeIncrements; i++) { + long memIncrementAmount = (long) rand.nextInt(5000); + stats.incrementMemorySizeByDimensions(dimensions, memIncrementAmount); + expectedMemorySize.put(shardIdString, expectedMemorySize.get(shardIdString) + memIncrementAmount); + } + + int numEntryIncrements = rand.nextInt(10); + for (int i = 0; i < numEntryIncrements; i++) { + stats.incrementEntriesByDimensions(dimensions); + expectedEntries.put(shardIdString, expectedEntries.get(shardIdString) + 1); + } + } + Map> expectedShardResults = new HashMap<>(); + expectedShardResults.put("hits", expectedHits); + expectedShardResults.put("misses", expectedMisses); + expectedShardResults.put("evictions", expectedEvictions); + expectedShardResults.put("memory_size", expectedMemorySize); + expectedShardResults.put("entries", expectedEntries); + return new StatsAndExpectedResults(stats, expectedShardResults, numShardIds); + } + + private void checkShardResults(StatsAndExpectedResults statsAndExpectedResults) { + // 
check the resulting values on dimension level are what we expect + Map> expectedResults = statsAndExpectedResults.expectedShardResults; + SingleDimensionCacheStats stats = statsAndExpectedResults.stats; + for (int shardId = 0; shardId < statsAndExpectedResults.numShardIds; shardId++) { + String shardIdString = String.valueOf(shardId); + CacheStatsDimension dimension = getDim(shardId); + + assertEquals((long) expectedResults.get("hits").get(shardIdString), stats.getHitsByDimension(dimension)); + assertEquals((long) expectedResults.get("misses").get(shardIdString), stats.getMissesByDimension(dimension)); + assertEquals((long) expectedResults.get("evictions").get(shardIdString), stats.getEvictionsByDimension(dimension)); + assertEquals((long) expectedResults.get("memory_size").get(shardIdString), stats.getMemorySizeByDimension(dimension)); + assertEquals((long) expectedResults.get("entries").get(shardIdString), stats.getEntriesByDimension(dimension)); + } + } + + private void checkTotalResults(StatsAndExpectedResults statsAndExpectedResults) { + // check resulting total values are what we expect + Map> expectedResults = statsAndExpectedResults.expectedShardResults; + SingleDimensionCacheStats stats = statsAndExpectedResults.stats; + assertEquals(sumMap(expectedResults.get("hits")), stats.getTotalHits()); + assertEquals(sumMap(expectedResults.get("misses")), stats.getTotalMisses()); + assertEquals(sumMap(expectedResults.get("evictions")), stats.getTotalEvictions()); + assertEquals(sumMap(expectedResults.get("memory_size")), stats.getTotalMemorySize()); + assertEquals(sumMap(expectedResults.get("entries")), stats.getTotalEntries()); + } + + // Convenience class to allow reusing setup code across tests + private class StatsAndExpectedResults { + private final SingleDimensionCacheStats stats; + private final Map> expectedShardResults; + private final int numShardIds; + private StatsAndExpectedResults(SingleDimensionCacheStats stats, Map> expectedShardResults, int 
numShardIds) { + this.stats = stats; + this.expectedShardResults = expectedShardResults; + this.numShardIds = numShardIds; + } + } +} From 937ca512dfe06b0e7a0ccb5e58c6b58294abc940 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Thu, 8 Feb 2024 14:54:40 -0800 Subject: [PATCH 02/32] First attempt to integrate new stats with EhcacheDiskCache Signed-off-by: Peter Alfonsi --- .../opensearch/cache/EhcacheCachePlugin.java | 4 +- .../cache/store/disk/EhcacheDiskCache.java | 187 +++++++++--------- .../store/disk/EhCacheDiskCacheTests.java | 149 +++++++------- .../org/opensearch/common/cache/ICache.java | 14 +- .../common/cache/stats/CacheStats.java | 3 + .../common/cache/stats/ICacheKey.java | 21 ++ .../stats/SingleDimensionCacheStats.java | 5 + .../cache/store/OpenSearchOnHeapCache.java | 71 ++++--- .../common/cache/store/StoreAwareCache.java | 4 +- ...reCacheBuilder.java => ICacheBuilder.java} | 33 ++-- ...wareCacheConfig.java => ICacheConfig.java} | 25 +-- .../cache/tier/TieredSpilloverCache.java | 151 +++++++------- .../cache/tier/TieredSpilloverCacheStats.java | 112 +++++++++++ .../stats/SingleDimensionCacheStatsTests.java | 6 + .../cache/tier/TieredSpilloverCacheTests.java | 18 +- 15 files changed, 476 insertions(+), 327 deletions(-) create mode 100644 server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java rename server/src/main/java/org/opensearch/common/cache/store/builders/{StoreAwareCacheBuilder.java => ICacheBuilder.java} (57%) rename server/src/main/java/org/opensearch/common/cache/store/config/{StoreAwareCacheConfig.java => ICacheConfig.java} (70%) create mode 100644 server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java index b82b52806a8b5..58b0bf0567a94 100644 --- 
a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java @@ -17,6 +17,7 @@ import org.opensearch.plugins.Plugin; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -36,7 +37,8 @@ public EhcacheCachePlugin() {} @Override public Map getCacheStoreTypeMap() { - return Map.of(CacheStoreType.DISK, new EhcacheDiskCache.EhcacheDiskCacheFactory()); + return new HashMap<>(); // TODO: FIX + //return Map.of(CacheStoreType.DISK, new EhcacheDiskCache.EhcacheDiskCacheFactory()); } @Override diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 725d27fc479de..7b5ee34f7772e 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -15,19 +15,17 @@ import org.opensearch.common.SuppressForbidden; import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; +import org.opensearch.common.cache.stats.ICacheKey; import org.opensearch.common.cache.stats.SingleDimensionCacheStats; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; -import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; -import org.opensearch.common.cache.store.config.StoreAwareCacheConfig; +import 
org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.enums.CacheStoreType; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.metrics.CounterMetric; -import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; @@ -43,6 +41,7 @@ import java.util.concurrent.ExecutionException; import java.util.function.BiFunction; import java.util.function.Supplier; +import java.util.function.ToLongBiFunction; import org.ehcache.Cache; import org.ehcache.CachePersistenceException; @@ -61,12 +60,7 @@ import org.ehcache.spi.loaderwriter.CacheLoadingException; import org.ehcache.spi.loaderwriter.CacheWritingException; -import static org.opensearch.cache.EhcacheSettings.DISK_CACHE_ALIAS_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_MAX_SIZE_IN_BYTES_KEY; import static org.opensearch.cache.EhcacheSettings.DISK_SEGMENT_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_STORAGE_PATH_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_STORAGE_PATH_SETTING; import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_CONCURRENCY_KEY; import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_MAXIMUM_THREADS_KEY; import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_MIN_THREADS_KEY; @@ -80,7 +74,7 @@ * */ @ExperimentalApi -public class EhcacheDiskCache implements StoreAwareCache { +public class EhcacheDiskCache implements ICache { private static final Logger logger = LogManager.getLogger(EhcacheDiskCache.class); @@ -93,7 +87,7 @@ public class EhcacheDiskCache implements StoreAwareCache { private final PersistentCacheManager cacheManager; // Disk cache - private Cache cache; + private Cache cache; private final long maxWeightInBytes; 
private final String storagePath; private final Class keyType; @@ -103,15 +97,41 @@ public class EhcacheDiskCache implements StoreAwareCache { private final EhCacheEventListener ehCacheEventListener; private final String threadPoolAlias; private final Settings settings; - private final StoreAwareCacheEventListener eventListener; private final CacheType cacheType; private final String diskCacheAlias; + private final String shardIdDimensionName; /** * Used in computeIfAbsent to synchronize loading of a given key. This is needed as ehcache doesn't provide a * computeIfAbsent method. */ - Map>> completableFutureMap = new ConcurrentHashMap<>(); + Map, CompletableFuture, V>>> completableFutureMap = new ConcurrentHashMap<>(); + + // I think we need this to instantiate the cache. We can't pass in values like ICacheKey.class to builders + // due to type erasure. + private class EhcacheKeyWrapper { + private final ICacheKey key; + public EhcacheKeyWrapper(ICacheKey key) { + this.key = key; + } + ICacheKey getKey() { + return key; + } + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null) { + return false; + } + if (o.getClass() != EhcacheKeyWrapper.class) { + return false; + } + EhcacheKeyWrapper other = (EhcacheKeyWrapper) o; + return other.getKey().equals(key); + } + } private EhcacheDiskCache(Builder builder) { this.keyType = Objects.requireNonNull(builder.keyType, "Key type shouldn't be null"); @@ -138,34 +158,33 @@ private EhcacheDiskCache(Builder builder) { } this.settings = Objects.requireNonNull(builder.getSettings(), "Settings objects shouldn't be null"); this.cacheManager = buildCacheManager(); - Objects.requireNonNull(builder.getEventListener(), "Listener can't be null"); - this.eventListener = builder.getEventListener(); - this.ehCacheEventListener = new EhCacheEventListener(builder.getEventListener()); + this.ehCacheEventListener = new EhCacheEventListener(Objects.requireNonNull(builder.getRemovalListener(), 
"Removal listener can't be null")); this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); - this.stats = new SingleDimensionCacheStats(builder.shardIdDimensionName); + this.shardIdDimensionName = Objects.requireNonNull(builder.shardIdDimensionName, "Dimension name can't be null"); + this.stats = new SingleDimensionCacheStats(shardIdDimensionName); } - private Cache buildCache(Duration expireAfterAccess, Builder builder) { + private Cache buildCache(Duration expireAfterAccess, Builder builder) { try { return this.cacheManager.createCache( this.diskCacheAlias, CacheConfigurationBuilder.newCacheConfigurationBuilder( - this.keyType, + ICacheKey.class, this.valueType, ResourcePoolsBuilder.newResourcePoolsBuilder().disk(maxWeightInBytes, MemoryUnit.B) ).withExpiry(new ExpiryPolicy<>() { @Override - public Duration getExpiryForCreation(K key, V value) { + public Duration getExpiryForCreation(ICacheKey key, V value) { return INFINITE; } @Override - public Duration getExpiryForAccess(K key, Supplier value) { + public Duration getExpiryForAccess(ICacheKey key, Supplier value) { return expireAfterAccess; } @Override - public Duration getExpiryForUpdate(K key, Supplier oldValue, V newValue) { + public Duration getExpiryForUpdate(ICacheKey key, Supplier oldValue, V newValue) { return INFINITE; } }) @@ -208,7 +227,7 @@ private CacheEventListenerConfigurationBuilder getListenerConfiguration(Builder< } // Package private for testing - Map>> getCompletableFutureMap() { + Map, CompletableFuture, V>>> getCompletableFutureMap() { return completableFutureMap; } @@ -237,7 +256,7 @@ private PersistentCacheManager buildCacheManager() { } @Override - public V get(K key) { + public V get(ICacheKey key) { if (key == null) { throw new IllegalArgumentException("Key passed to ehcache disk cache was null."); } @@ -248,9 +267,9 @@ public V get(K key) { throw new OpenSearchException("Exception occurred while trying to fetch item from ehcache disk cache"); } if (value 
!= null) { - eventListener.onHit(key, value, CacheStoreType.DISK); + stats.incrementHitsByDimensions(key.dimensions); } else { - eventListener.onMiss(key, CacheStoreType.DISK); + stats.incrementMissesByDimensions(key.dimensions); } return value; } @@ -261,7 +280,7 @@ public V get(K key) { * @param value Type of value. */ @Override - public void put(K key, V value) { + public void put(ICacheKey key, V value) { try { cache.put(key, value); } catch (CacheWritingException ex) { @@ -277,8 +296,8 @@ public void put(K key, V value) { * @throws Exception when either internal get or put calls fail. */ @Override - public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { - // Ehache doesn't provide any computeIfAbsent function. Exposes putIfAbsent but that works differently and is + public V computeIfAbsent(ICacheKey key, LoadAwareCacheLoader, V> loader) throws Exception { + // Ehcache doesn't provide any computeIfAbsent function. Exposes putIfAbsent but that works differently and is // not performant in case there are multiple concurrent request for same key. Below is our own custom // implementation of computeIfAbsent on top of ehcache. Inspired by OpenSearch Cache implementation. V value = cache.get(key); @@ -286,22 +305,24 @@ public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Except value = compute(key, loader); } if (!loader.isLoaded()) { - eventListener.onHit(key, value, CacheStoreType.DISK); + //eventListener.onHit(key, value, CacheStoreType.DISK); + stats.incrementHitsByDimensions(key.dimensions); } else { - eventListener.onMiss(key, CacheStoreType.DISK); + //eventListener.onMiss(key, CacheStoreType.DISK); + stats.incrementMissesByDimensions(key.dimensions); } return value; } - private V compute(K key, LoadAwareCacheLoader loader) throws Exception { + private V compute(ICacheKey key, LoadAwareCacheLoader, V> loader) throws Exception { // A future that returns a pair of key/value. 
- CompletableFuture> completableFuture = new CompletableFuture<>(); + CompletableFuture, V>> completableFuture = new CompletableFuture<>(); // Only one of the threads will succeed putting a future into map for the same key. // Rest will fetch existing future. - CompletableFuture> future = completableFutureMap.putIfAbsent(key, completableFuture); + CompletableFuture, V>> future = completableFutureMap.putIfAbsent(key, completableFuture); // Handler to handle results post processing. Takes a tuple or exception as an input and returns // the value. Also before returning value, puts the value in cache. - BiFunction, Throwable, V> handler = (pair, ex) -> { + BiFunction, V>, Throwable, V> handler = (pair, ex) -> { V value = null; if (pair != null) { cache.put(pair.v1(), pair.v2()); @@ -351,7 +372,7 @@ private V compute(K key, LoadAwareCacheLoader loader) throws Exception { * @param key key to be invalidated. */ @Override - public void invalidate(K key) { + public void invalidate(ICacheKey key) { try { cache.remove(key); } catch (CacheWritingException ex) { @@ -369,7 +390,7 @@ public void invalidateAll() {} * @return Iterable */ @Override - public Iterable keys() { + public Iterable> keys() { return () -> new EhCacheKeyIterator<>(cache.iterator()); } @@ -407,25 +428,15 @@ public CacheStats stats() { return stats; } - /** - * Returns the tier type. - * @return CacheStoreType.DISK - */ - @Override - public CacheStoreType getTierType() { - return CacheStoreType.DISK; - } - - /** * This iterator wraps ehCache iterator and only iterates over its keys. 
* @param Type of key */ - class EhCacheKeyIterator implements Iterator { + class EhCacheKeyIterator implements Iterator> { - Iterator> iterator; + Iterator> iterator; - EhCacheKeyIterator(Iterator> iterator) { + EhCacheKeyIterator(Iterator> iterator) { this.iterator = iterator; } @@ -435,7 +446,7 @@ public boolean hasNext() { } @Override - public K next() { + public ICacheKey next() { if (!hasNext()) { throw new NoSuchElementException(); } @@ -448,56 +459,35 @@ public K next() { * @param Type of key * @param Type of value */ - class EhCacheEventListener implements CacheEventListener { + class EhCacheEventListener implements CacheEventListener, V> { - private final StoreAwareCacheEventListener eventListener; + //private final StoreAwareCacheEventListener eventListener; + private final RemovalListener, V> removalListener; - EhCacheEventListener(StoreAwareCacheEventListener eventListener) { - this.eventListener = eventListener; + EhCacheEventListener(RemovalListener, V> removalListener) { + this.removalListener = removalListener; } @Override - public void onEvent(CacheEvent event) { } - /*switch (event.getType()) { + public void onEvent(CacheEvent, ? 
extends V> event) { + switch (event.getType()) { case CREATED: - stats.count.inc(); - this.eventListener.onCached(event.getKey(), event.getNewValue(), CacheStoreType.DISK); + stats.incrementEntriesByDimensions(event.getKey().dimensions); assert event.getOldValue() == null; break; case EVICTED: - this.eventListener.onRemoval( - new StoreAwareCacheRemovalNotification<>( - event.getKey(), - event.getOldValue(), - RemovalReason.EVICTED, - CacheStoreType.DISK - ) - ); - stats.count.dec(); + this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), event.getOldValue(), RemovalReason.EVICTED)); + stats.decrementEntriesByDimensions(event.getKey().dimensions); assert event.getNewValue() == null; break; case REMOVED: - stats.count.dec(); - this.eventListener.onRemoval( - new StoreAwareCacheRemovalNotification<>( - event.getKey(), - event.getOldValue(), - RemovalReason.EXPLICIT, - CacheStoreType.DISK - ) - ); + this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), event.getOldValue(), RemovalReason.EXPLICIT)); + stats.decrementEntriesByDimensions(event.getKey().dimensions); assert event.getNewValue() == null; break; case EXPIRED: - this.eventListener.onRemoval( - new StoreAwareCacheRemovalNotification<>( - event.getKey(), - event.getOldValue(), - RemovalReason.INVALIDATED, - CacheStoreType.DISK - ) - ); - stats.count.dec(); + this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), event.getOldValue(), RemovalReason.INVALIDATED)); + stats.decrementEntriesByDimensions(event.getKey().dimensions); assert event.getNewValue() == null; break; case UPDATED: @@ -505,26 +495,26 @@ public void onEvent(CacheEvent event) { } default: break; } - }*/ + } } /** * Factory to create an ehcache disk cache. */ - public class EhcacheDiskCacheFactory implements StoreAwareCache.Factory { + /*public class EhcacheDiskCacheFactory implements ICache.Factory { /** * Ehcache disk cache name. 
*/ - public static final String EHCACHE_DISK_CACHE_NAME = "ehcacheDiskCache"; + /*public static final String EHCACHE_DISK_CACHE_NAME = "ehcacheDiskCache"; /** * Default constructor. */ - public EhcacheDiskCacheFactory() {} + /*public EhcacheDiskCacheFactory() {} @Override - public StoreAwareCache create(StoreAwareCacheConfig config, CacheType cacheType) { + public ICache create(ICacheConfig config, CacheType cacheType) { Map> settingList = EhcacheSettings.getSettingListForCacheTypeAndStore(cacheType, CacheStoreType.DISK); Settings settings = config.getSettings(); Setting stringSetting = DISK_STORAGE_PATH_SETTING.getConcreteSettingForNamespace( @@ -535,7 +525,7 @@ public StoreAwareCache create(StoreAwareCacheConfig config, C .setCacheType(cacheType) .setKeyType((config.getKeyType())) .setValueType(config.getValueType()) - .setEventListener(config.getEventListener()) + .setRemovalListener(config.getRemovalListener()) .setExpireAfterAccess((TimeValue) settingList.get(DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY).get(settings)) .setMaximumWeightInBytes((Long) settingList.get(DISK_MAX_SIZE_IN_BYTES_KEY).get(settings)) .setSettings(settings) @@ -546,14 +536,15 @@ public StoreAwareCache create(StoreAwareCacheConfig config, C public String getCacheName() { return EHCACHE_DISK_CACHE_NAME; } - } + }*/ /** * Builder object to build Ehcache disk tier. * @param Type of key * @param Type of value */ - public class Builder extends StoreAwareCacheBuilder { + public static class Builder extends ICacheBuilder { + // TODO: Should inherit from whatever new thing Sagar adds (ICacheBuilder?) 
private CacheType cacheType; private String storagePath; @@ -650,7 +641,7 @@ public Builder setShardIdDimensionName(String dimensionName) { return this; } - @Override + //@Override public EhcacheDiskCache build() { return new EhcacheDiskCache<>(this); } diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 0f687cf923e66..088b1d8eac2b6 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -8,12 +8,14 @@ package org.opensearch.cache.store.disk; -import org.opensearch.cache.EhcacheSettings; import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; import org.opensearch.common.cache.LoadAwareCacheLoader; -import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.stats.CacheStatsDimension; +import org.opensearch.common.cache.stats.ICacheKey; import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; -import org.opensearch.common.cache.store.config.StoreAwareCacheConfig; import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; import org.opensearch.common.settings.Settings; @@ -33,19 +35,18 @@ import java.util.concurrent.Phaser; import java.util.concurrent.atomic.AtomicInteger; -import static org.opensearch.cache.EhcacheSettings.DISK_MAX_SIZE_IN_BYTES_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_STORAGE_PATH_KEY; import static org.hamcrest.CoreMatchers.instanceOf; public class EhCacheDiskCacheTests extends OpenSearchSingleNodeTestCase { private static final int 
CACHE_SIZE_IN_BYTES = 1024 * 101; + private final String dimensionName = "shardId"; public void testBasicGetAndPut() throws IOException { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener mockRemovalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setThreadPoolAlias("ehcacheTest") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) .setValueType(String.class) @@ -53,7 +54,7 @@ public void testBasicGetAndPut() throws IOException { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + .setRemovalListener(mockRemovalListener) .build(); int randomKeys = randomIntBetween(10, 100); Map keyValueMap = new HashMap<>(); @@ -61,32 +62,32 @@ public void testBasicGetAndPut() throws IOException { keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString()); } for (Map.Entry entry : keyValueMap.entrySet()) { - ehcacheTest.put(entry.getKey(), entry.getValue()); + ehcacheTest.put(getICacheKey(entry.getKey()), entry.getValue()); } for (Map.Entry entry : keyValueMap.entrySet()) { - String value = ehcacheTest.get(entry.getKey()); + String value = ehcacheTest.get(getICacheKey(entry.getKey())); assertEquals(entry.getValue(), value); } - assertEquals(randomKeys, mockEventListener.onCachedCount.get()); - assertEquals(randomKeys, mockEventListener.onHitCount.get()); + //assertEquals(randomKeys, mockEventListener.onCachedCount.get()); + //assertEquals(randomKeys, mockEventListener.onHitCount.get()); // Validate misses int expectedNumberOfMisses = randomIntBetween(10, 200); for (int i = 0; i < expectedNumberOfMisses; i++) { - 
ehcacheTest.get(UUID.randomUUID().toString()); + ehcacheTest.get(getICacheKey(UUID.randomUUID().toString())); } - assertEquals(expectedNumberOfMisses, mockEventListener.onMissCount.get()); + //assertEquals(expectedNumberOfMisses, mockEventListener.onMissCount.get()); ehcacheTest.close(); } } - public void testBasicGetAndPutUsingFactory() throws IOException { - MockEventListener mockEventListener = new MockEventListener<>(); + /*public void testBasicGetAndPutUsingFactory() throws IOException { + MockRemovalListener mockRemovalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(Settings.EMPTY)) { StoreAwareCache.Factory ehcacheFactory = new EhcacheDiskCache.EhcacheDiskCacheFactory(); StoreAwareCache ehcacheTest = ehcacheFactory.create( - new StoreAwareCacheConfig.Builder().setValueType(String.class) + new ICacheConfig.Builder().setValueType(String.class) .setKeyType(String.class) .setEventListener(mockEventListener) .setSettings( @@ -132,13 +133,13 @@ public void testBasicGetAndPutUsingFactory() throws IOException { assertEquals(expectedNumberOfMisses, mockEventListener.onMissCount.get()); ehcacheTest.close(); } - } + }*/ public void testConcurrentPut() throws Exception { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener mockRemovalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) @@ -147,7 +148,7 @@ public void testConcurrentPut() throws Exception { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - 
.setEventListener(mockEventListener) + .setRemovalListener(mockRemovalListener) .build(); int randomKeys = randomIntBetween(20, 100); Thread[] threads = new Thread[randomKeys]; @@ -161,7 +162,7 @@ public void testConcurrentPut() throws Exception { for (Map.Entry entry : keyValueMap.entrySet()) { threads[j] = new Thread(() -> { phaser.arriveAndAwaitAdvance(); - ehcacheTest.put(entry.getKey(), entry.getValue()); + ehcacheTest.put(getICacheKey(entry.getKey()), entry.getValue()); countDownLatch.countDown(); }); threads[j].start(); @@ -170,19 +171,19 @@ public void testConcurrentPut() throws Exception { phaser.arriveAndAwaitAdvance(); // Will trigger parallel puts above. countDownLatch.await(); // Wait for all threads to finish for (Map.Entry entry : keyValueMap.entrySet()) { - String value = ehcacheTest.get(entry.getKey()); + String value = ehcacheTest.get(getICacheKey(entry.getKey())); assertEquals(entry.getValue(), value); } - assertEquals(randomKeys, mockEventListener.onCachedCount.get()); + //assertEquals(randomKeys, mockEventListener.onCachedCount.get()); ehcacheTest.close(); } } public void testEhcacheParallelGets() throws Exception { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener mockRemovalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setIsEventListenerModeSync(true) // For accurate count @@ -192,7 +193,7 @@ public void testEhcacheParallelGets() throws Exception { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + 
.setRemovalListener(mockRemovalListener) .build(); int randomKeys = randomIntBetween(20, 100); Thread[] threads = new Thread[randomKeys]; @@ -204,13 +205,13 @@ public void testEhcacheParallelGets() throws Exception { keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString()); } for (Map.Entry entry : keyValueMap.entrySet()) { - ehcacheTest.put(entry.getKey(), entry.getValue()); + ehcacheTest.put(getICacheKey(entry.getKey()), entry.getValue()); } assertEquals(keyValueMap.size(), ehcacheTest.count()); for (Map.Entry entry : keyValueMap.entrySet()) { threads[j] = new Thread(() -> { phaser.arriveAndAwaitAdvance(); - assertEquals(entry.getValue(), ehcacheTest.get(entry.getKey())); + assertEquals(entry.getValue(), ehcacheTest.get(getICacheKey(entry.getKey()))); countDownLatch.countDown(); }); threads[j].start(); @@ -218,7 +219,7 @@ public void testEhcacheParallelGets() throws Exception { } phaser.arriveAndAwaitAdvance(); // Will trigger parallel puts above. countDownLatch.await(); // Wait for all threads to finish - assertEquals(randomKeys, mockEventListener.onHitCount.get()); + //assertEquals(randomKeys, mockEventListener.onHitCount.get()); ehcacheTest.close(); } } @@ -226,7 +227,7 @@ public void testEhcacheParallelGets() throws Exception { public void testEhcacheKeyIterator() throws Exception { Settings settings = Settings.builder().build(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) @@ -235,7 +236,7 @@ public void testEhcacheKeyIterator() throws Exception { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(new MockEventListener<>()) + 
.setRemovalListener(new MockRemovalListener<>()) .build(); int randomKeys = randomIntBetween(2, 100); @@ -244,16 +245,16 @@ public void testEhcacheKeyIterator() throws Exception { keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString()); } for (Map.Entry entry : keyValueMap.entrySet()) { - ehcacheTest.put(entry.getKey(), entry.getValue()); + ehcacheTest.put(getICacheKey(entry.getKey()), entry.getValue()); } - Iterator keys = ehcacheTest.keys().iterator(); + Iterator> keys = ehcacheTest.keys().iterator(); int keysCount = 0; while (keys.hasNext()) { - String key = keys.next(); + ICacheKey key = keys.next(); keysCount++; assertNotNull(ehcacheTest.get(key)); } - assertEquals(CacheStoreType.DISK, ehcacheTest.getTierType()); + //assertEquals(CacheStoreType.DISK, ehcacheTest.getTierType()); assertEquals(keysCount, randomKeys); ehcacheTest.close(); } @@ -261,9 +262,9 @@ public void testEhcacheKeyIterator() throws Exception { public void testEvictions() throws Exception { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener mockRemovalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setIsEventListenerModeSync(true) .setThreadPoolAlias("ehcacheTest") @@ -273,7 +274,7 @@ public void testEvictions() throws Exception { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + .setRemovalListener(mockRemovalListener) .build(); // Generate a string with 100 characters @@ -282,18 +283,18 @@ public void testEvictions() throws Exception { // Trying to generate more than 100kb to cause 
evictions. for (int i = 0; i < 1000; i++) { String key = "Key" + i; - ehcacheTest.put(key, value); + ehcacheTest.put(getICacheKey(key), value); } - assertTrue(mockEventListener.onRemovalCount.get() > 0); + //assertTrue(mockEventListener.onRemovalCount.get() > 0); ehcacheTest.close(); } } public void testComputeIfAbsentConcurrently() throws Exception { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener mockRemovalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setIsEventListenerModeSync(true) .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setThreadPoolAlias("ehcacheTest") @@ -303,7 +304,7 @@ public void testComputeIfAbsentConcurrently() throws Exception { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + .setRemovalListener(mockRemovalListener) .build(); int numberOfRequest = 2;// randomIntBetween(200, 400); @@ -313,12 +314,12 @@ public void testComputeIfAbsentConcurrently() throws Exception { Phaser phaser = new Phaser(numberOfRequest + 1); CountDownLatch countDownLatch = new CountDownLatch(numberOfRequest); - List> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>(); + List, String>> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>(); // Try to hit different request with the same key concurrently. Verify value is only loaded once. 
for (int i = 0; i < numberOfRequest; i++) { threads[i] = new Thread(() -> { - LoadAwareCacheLoader loadAwareCacheLoader = new LoadAwareCacheLoader<>() { + LoadAwareCacheLoader, String> loadAwareCacheLoader = new LoadAwareCacheLoader<>() { boolean isLoaded; @Override @@ -327,7 +328,7 @@ public boolean isLoaded() { } @Override - public String load(String key) { + public String load(ICacheKey key) { isLoaded = true; return value; } @@ -335,7 +336,7 @@ public String load(String key) { loadAwareCacheLoaderList.add(loadAwareCacheLoader); phaser.arriveAndAwaitAdvance(); try { - assertEquals(value, ehcacheTest.computeIfAbsent(key, loadAwareCacheLoader)); + assertEquals(value, ehcacheTest.computeIfAbsent(getICacheKey(key), loadAwareCacheLoader)); } catch (Exception e) { throw new RuntimeException(e); } @@ -353,18 +354,18 @@ public String load(String key) { } assertEquals(1, numberOfTimesValueLoaded); assertEquals(0, ((EhcacheDiskCache) ehcacheTest).getCompletableFutureMap().size()); - assertEquals(1, mockEventListener.onMissCount.get()); - assertEquals(1, mockEventListener.onCachedCount.get()); - assertEquals(numberOfRequest - 1, mockEventListener.onHitCount.get()); + //assertEquals(1, mockEventListener.onMissCount.get()); + //assertEquals(1, mockEventListener.onCachedCount.get()); + //assertEquals(numberOfRequest - 1, mockEventListener.onHitCount.get()); ehcacheTest.close(); } } public void testComputeIfAbsentConcurrentlyAndThrowsException() throws Exception { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener mockRemovalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") 
.setIsEventListenerModeSync(true) .setThreadPoolAlias("ehcacheTest") @@ -374,7 +375,7 @@ public void testComputeIfAbsentConcurrentlyAndThrowsException() throws Exception .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + .setRemovalListener(mockRemovalListener) .build(); int numberOfRequest = randomIntBetween(200, 400); @@ -383,12 +384,12 @@ public void testComputeIfAbsentConcurrentlyAndThrowsException() throws Exception Phaser phaser = new Phaser(numberOfRequest + 1); CountDownLatch countDownLatch = new CountDownLatch(numberOfRequest); - List> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>(); + List, String>> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>(); // Try to hit different request with the same key concurrently. Loader throws exception. for (int i = 0; i < numberOfRequest; i++) { threads[i] = new Thread(() -> { - LoadAwareCacheLoader loadAwareCacheLoader = new LoadAwareCacheLoader<>() { + LoadAwareCacheLoader, String> loadAwareCacheLoader = new LoadAwareCacheLoader<>() { boolean isLoaded; @Override @@ -397,14 +398,14 @@ public boolean isLoaded() { } @Override - public String load(String key) throws Exception { + public String load(ICacheKey key) throws Exception { isLoaded = true; throw new RuntimeException("Exception"); } }; loadAwareCacheLoaderList.add(loadAwareCacheLoader); phaser.arriveAndAwaitAdvance(); - assertThrows(ExecutionException.class, () -> ehcacheTest.computeIfAbsent(key, loadAwareCacheLoader)); + assertThrows(ExecutionException.class, () -> ehcacheTest.computeIfAbsent(getICacheKey(key), loadAwareCacheLoader)); countDownLatch.countDown(); }); threads[i].start(); @@ -419,9 +420,9 @@ public String load(String key) throws Exception { public void testComputeIfAbsentWithNullValueLoading() throws Exception { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + 
MockRemovalListener mockRemovalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") .setIsEventListenerModeSync(true) .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") @@ -431,7 +432,7 @@ public void testComputeIfAbsentWithNullValueLoading() throws Exception { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + .setRemovalListener(mockRemovalListener) .build(); int numberOfRequest = randomIntBetween(200, 400); @@ -440,12 +441,12 @@ public void testComputeIfAbsentWithNullValueLoading() throws Exception { Phaser phaser = new Phaser(numberOfRequest + 1); CountDownLatch countDownLatch = new CountDownLatch(numberOfRequest); - List> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>(); + List, String>> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>(); // Try to hit different request with the same key concurrently. Loader throws exception. 
for (int i = 0; i < numberOfRequest; i++) { threads[i] = new Thread(() -> { - LoadAwareCacheLoader loadAwareCacheLoader = new LoadAwareCacheLoader<>() { + LoadAwareCacheLoader, String> loadAwareCacheLoader = new LoadAwareCacheLoader<>() { boolean isLoaded; @Override @@ -454,7 +455,7 @@ public boolean isLoaded() { } @Override - public String load(String key) throws Exception { + public String load(ICacheKey key) throws Exception { isLoaded = true; return null; } @@ -462,11 +463,11 @@ public String load(String key) throws Exception { loadAwareCacheLoaderList.add(loadAwareCacheLoader); phaser.arriveAndAwaitAdvance(); try { - ehcacheTest.computeIfAbsent(key, loadAwareCacheLoader); + ehcacheTest.computeIfAbsent(getICacheKey(key), loadAwareCacheLoader); } catch (Exception ex) { assertThat(ex.getCause(), instanceOf(NullPointerException.class)); } - assertThrows(ExecutionException.class, () -> ehcacheTest.computeIfAbsent(key, loadAwareCacheLoader)); + assertThrows(ExecutionException.class, () -> ehcacheTest.computeIfAbsent(getICacheKey(key), loadAwareCacheLoader)); countDownLatch.countDown(); }); threads[i].start(); @@ -491,6 +492,14 @@ private static String generateRandomString(int length) { return randomString.toString(); } + private List getMockDimensions() { + return List.of(new CacheStatsDimension(dimensionName, "0")); + } + + private ICacheKey getICacheKey(String key) { + return new ICacheKey<>(key, getMockDimensions()); + } + class MockEventListener implements StoreAwareCacheEventListener { AtomicInteger onMissCount = new AtomicInteger(); @@ -524,4 +533,12 @@ public void onCached(K key, V value, CacheStoreType cacheStoreType) { onCachedCount.incrementAndGet(); } } + + class MockRemovalListener implements RemovalListener, V> { + AtomicInteger onRemovalCount = new AtomicInteger(); + @Override + public void onRemoval(RemovalNotification, V> notification) { + onRemovalCount.incrementAndGet(); + } + } } diff --git 
a/server/src/main/java/org/opensearch/common/cache/ICache.java b/server/src/main/java/org/opensearch/common/cache/ICache.java index fad2a31786825..174004ff720f0 100644 --- a/server/src/main/java/org/opensearch/common/cache/ICache.java +++ b/server/src/main/java/org/opensearch/common/cache/ICache.java @@ -8,7 +8,11 @@ package org.opensearch.common.cache; +import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.stats.CacheStats; +import org.opensearch.common.cache.stats.ICacheKey; +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.config.ICacheConfig; import java.io.Closeable; @@ -20,17 +24,17 @@ * @opensearch.experimental */ public interface ICache extends Closeable { - V get(K key); + V get(ICacheKey key); - void put(K key, V value); + void put(ICacheKey key, V value); - V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception; + V computeIfAbsent(ICacheKey key, LoadAwareCacheLoader, V> loader) throws Exception; - void invalidate(K key); + void invalidate(ICacheKey key); void invalidateAll(); - Iterable keys(); + Iterable> keys(); long count(); diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java index 820c530df462a..a2c0d2eb3483b 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java @@ -32,7 +32,10 @@ public interface CacheStats extends Writeable { // TODO: Make this extend ToXCon void incrementHitsByDimensions(List dimensions); void incrementMissesByDimensions(List dimensions); void incrementEvictionsByDimensions(List dimensions); + // Can also use to decrement, with negative values void incrementMemorySizeByDimensions(List dimensions, long amountBytes); void incrementEntriesByDimensions(List dimensions); + void decrementEntriesByDimensions(List dimensions); 
+ } diff --git a/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java b/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java new file mode 100644 index 0000000000000..bcbc30f84f899 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java @@ -0,0 +1,21 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.stats; + +import java.util.List; + +public class ICacheKey { + public final K key; + public final List dimensions; + + public ICacheKey(K key, List dimensions) { + this.key = key; + this.dimensions = dimensions; + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java index 2597d5c678242..9f0bc0ba68c55 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java @@ -176,6 +176,11 @@ public void incrementEntriesByDimensions(List dimensions) { internalIncrement(dimensions, entriesMap, totalEntries, 1); } + @Override + public void decrementEntriesByDimensions(List dimensions) { + internalIncrement(dimensions, entriesMap, totalEntries, -1); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeMap(convertCounterMapToLong(hitsMap), StreamOutput::writeString, StreamOutput::writeVLong); diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index e93b70dc8b5e0..956829a639aa3 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ 
b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -10,14 +10,18 @@ import org.opensearch.common.cache.Cache; import org.opensearch.common.cache.CacheBuilder; +import org.opensearch.common.cache.ICache; import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.cache.stats.CacheStats; +import org.opensearch.common.cache.stats.ICacheKey; import org.opensearch.common.cache.stats.SingleDimensionCacheStats; -import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; -import org.opensearch.common.cache.store.enums.CacheStoreType; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.cache.store.builders.ICacheBuilder; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; + +import java.util.function.ToLongBiFunction; /** * This variant of on-heap cache uses OpenSearch custom cache implementation. 
@@ -26,16 +30,14 @@ * * @opensearch.experimental */ -public class OpenSearchOnHeapCache implements StoreAwareCache, RemovalListener { - - private final Cache cache; +public class OpenSearchOnHeapCache implements ICache, RemovalListener, V> { - private final StoreAwareCacheEventListener eventListener; + private final Cache, V> cache; - private final CacheStats stats = new SingleDimensionCacheStats(""); + private final CacheStats stats; public OpenSearchOnHeapCache(Builder builder) { - CacheBuilder cacheBuilder = CacheBuilder.builder() + CacheBuilder, V> cacheBuilder = CacheBuilder., V>builder() .setMaximumWeight(builder.getMaxWeightInBytes()) .weigher(builder.getWeigher()) .removalListener(this); @@ -43,40 +45,40 @@ public OpenSearchOnHeapCache(Builder builder) { cacheBuilder.setExpireAfterAccess(builder.getExpireAfterAcess()); } cache = cacheBuilder.build(); - this.eventListener = builder.getEventListener(); + this.stats = new SingleDimensionCacheStats(builder.shardIdDimensionName); } @Override - public V get(K key) { + public V get(ICacheKey key) { V value = cache.get(key); if (value != null) { - eventListener.onHit(key, value, CacheStoreType.ON_HEAP); + //eventListener.onHit(key, value, CacheStoreType.ON_HEAP); } else { - eventListener.onMiss(key, CacheStoreType.ON_HEAP); + //eventListener.onMiss(key, CacheStoreType.ON_HEAP); } return value; } @Override - public void put(K key, V value) { + public void put(ICacheKey key, V value) { cache.put(key, value); - eventListener.onCached(key, value, CacheStoreType.ON_HEAP); + //eventListener.onCached(key, value, CacheStoreType.ON_HEAP); } @Override - public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { + public V computeIfAbsent(ICacheKey key, LoadAwareCacheLoader, V> loader) throws Exception { V value = cache.computeIfAbsent(key, key1 -> loader.load(key)); if (!loader.isLoaded()) { - eventListener.onHit(key, value, CacheStoreType.ON_HEAP); + //eventListener.onHit(key, value, 
CacheStoreType.ON_HEAP); } else { - eventListener.onMiss(key, CacheStoreType.ON_HEAP); - eventListener.onCached(key, value, CacheStoreType.ON_HEAP); + //eventListener.onMiss(key, CacheStoreType.ON_HEAP); + //eventListener.onCached(key, value, CacheStoreType.ON_HEAP); } return value; } @Override - public void invalidate(K key) { + public void invalidate(ICacheKey key) { cache.invalidate(key); } @@ -86,7 +88,7 @@ public void invalidateAll() { } @Override - public Iterable keys() { + public Iterable> keys() { return cache.keys(); } @@ -109,20 +111,8 @@ public CacheStats stats() { } @Override - public CacheStoreType getTierType() { - return CacheStoreType.ON_HEAP; - } - - @Override - public void onRemoval(RemovalNotification notification) { - eventListener.onRemoval( - new StoreAwareCacheRemovalNotification<>( - notification.getKey(), - notification.getValue(), - notification.getRemovalReason(), - CacheStoreType.ON_HEAP - ) - ); + public void onRemoval(RemovalNotification, V> notification) { + // TODO } /** @@ -140,10 +130,15 @@ public long count() { * @param Type of key * @param Type of value */ - public static class Builder extends StoreAwareCacheBuilder { + public static class Builder extends ICacheBuilder { - @Override - public StoreAwareCache build() { + private String shardIdDimensionName; + + public Builder setShardIdDimensionName(String dimensionName) { + this.shardIdDimensionName = dimensionName; + return this; + } + public ICache build() { return new OpenSearchOnHeapCache(this); } } diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java index 46bb7f1be6986..40423fb6c0837 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java @@ -11,7 +11,7 @@ import org.opensearch.common.annotation.ExperimentalApi; import 
org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.ICache; -import org.opensearch.common.cache.store.config.StoreAwareCacheConfig; +import org.opensearch.common.cache.store.config.ICacheConfig; import org.opensearch.common.cache.store.enums.CacheStoreType; /** @@ -30,7 +30,7 @@ public interface StoreAwareCache extends ICache { */ @ExperimentalApi interface Factory { - StoreAwareCache create(StoreAwareCacheConfig config, CacheType cacheType); + StoreAwareCache create(ICacheConfig config, CacheType cacheType); String getCacheName(); } diff --git a/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java b/server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java similarity index 57% rename from server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java rename to server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java index 3b52afebf5579..9ec6346f6698e 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java +++ b/server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java @@ -8,6 +8,9 @@ package org.opensearch.common.cache.store.builders; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.stats.ICacheKey; import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; import org.opensearch.common.settings.Settings; @@ -22,41 +25,41 @@ * * @opensearch.internal */ -public abstract class StoreAwareCacheBuilder { +public abstract class ICacheBuilder { private long maxWeightInBytes; - private ToLongBiFunction weigher; + private ToLongBiFunction, V> weigher; private TimeValue expireAfterAcess; - private StoreAwareCacheEventListener eventListener; - private Settings settings; - public StoreAwareCacheBuilder() 
{} + private RemovalListener, V> removalListener; + + public ICacheBuilder() {} - public StoreAwareCacheBuilder setMaximumWeightInBytes(long sizeInBytes) { + public ICacheBuilder setMaximumWeightInBytes(long sizeInBytes) { this.maxWeightInBytes = sizeInBytes; return this; } - public StoreAwareCacheBuilder setWeigher(ToLongBiFunction weigher) { + public ICacheBuilder setWeigher(ToLongBiFunction, V> weigher) { this.weigher = weigher; return this; } - public StoreAwareCacheBuilder setExpireAfterAccess(TimeValue expireAfterAcess) { + public ICacheBuilder setExpireAfterAccess(TimeValue expireAfterAcess) { this.expireAfterAcess = expireAfterAcess; return this; } - public StoreAwareCacheBuilder setEventListener(StoreAwareCacheEventListener eventListener) { - this.eventListener = eventListener; + public ICacheBuilder setRemovalListener(RemovalListener, V> listener) { + this.removalListener = listener; return this; } - public StoreAwareCacheBuilder setSettings(Settings settings) { + public ICacheBuilder setSettings(Settings settings) { this.settings = settings; return this; } @@ -69,17 +72,17 @@ public TimeValue getExpireAfterAcess() { return expireAfterAcess; } - public ToLongBiFunction getWeigher() { + public ToLongBiFunction, V> getWeigher() { return weigher; } - public StoreAwareCacheEventListener getEventListener() { - return this.eventListener; + public RemovalListener, V> getRemovalListener() { + return removalListener; } public Settings getSettings() { return settings; } - public abstract StoreAwareCache build(); + public abstract ICache build(); } diff --git a/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java b/server/src/main/java/org/opensearch/common/cache/store/config/ICacheConfig.java similarity index 70% rename from server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java rename to server/src/main/java/org/opensearch/common/cache/store/config/ICacheConfig.java index 
98aba608e0756..d62210b81a92a 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java +++ b/server/src/main/java/org/opensearch/common/cache/store/config/ICacheConfig.java @@ -9,6 +9,8 @@ package org.opensearch.common.cache.store.config; import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.stats.ICacheKey; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; import org.opensearch.common.settings.Settings; @@ -18,9 +20,10 @@ * @opensearch.experimental */ @ExperimentalApi -public class StoreAwareCacheConfig { +public class ICacheConfig { - private StoreAwareCacheEventListener eventListener; + //private StoreAwareCacheEventListener eventListener; + private RemovalListener, V> removalListener; private Settings settings; @@ -28,15 +31,15 @@ public class StoreAwareCacheConfig { private Class valueType; - private StoreAwareCacheConfig(Builder builder) { + private ICacheConfig(Builder builder) { this.keyType = builder.keyType; this.valueType = builder.valueType; this.settings = builder.settings; - this.eventListener = builder.eventListener; + this.removalListener = builder.removalListener; } - public StoreAwareCacheEventListener getEventListener() { - return eventListener; + public RemovalListener, V> getRemovalListener() { + return removalListener; } public Class getKeyType() { @@ -58,7 +61,7 @@ public Settings getSettings() { */ public static class Builder { - private StoreAwareCacheEventListener eventListener; + private RemovalListener, V> removalListener; private Settings settings; @@ -68,8 +71,8 @@ public static class Builder { public Builder() {} - public Builder setEventListener(StoreAwareCacheEventListener listener) { - this.eventListener = listener; + public Builder setRemovalListener(RemovalListener, V> listener) { + this.removalListener = listener; return this; } @@ -88,8 +91,8 @@ public Builder 
setValueType(Class valueType) { return this; } - public StoreAwareCacheConfig build() { - return new StoreAwareCacheConfig<>(this); + public ICacheConfig build() { + return new ICacheConfig<>(this); } } } diff --git a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java index 74d72f94576c7..d25137635edfd 100644 --- a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java +++ b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java @@ -10,12 +10,15 @@ import org.opensearch.common.cache.ICache; import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; +import org.opensearch.common.cache.stats.ICacheKey; import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; import org.opensearch.common.cache.store.StoreAwareCacheValue; -import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; +import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; import org.opensearch.common.util.concurrent.ReleasableLock; @@ -41,13 +44,18 @@ * * @opensearch.experimental */ -public class TieredSpilloverCache implements ICache, StoreAwareCacheEventListener { +public class TieredSpilloverCache implements ICache { // TODO: Remove optional when diskCache implementation is integrated. 
- private final Optional> onDiskCache; - private final StoreAwareCache onHeapCache; - private final StoreAwareCacheEventListener listener; - //private final CacheStats stats = new TieredSpillOverCacheStats(); + private final Optional> onDiskCache; + private final ICache onHeapCache; + // Listeners for removals from the two tiers + private final RemovalListener, V> onDiskRemovalListener; + private final RemovalListener, V> onHeapRemovalListener; + + // The listener for removals from the spillover cache as a whole + private final RemovalListener, V> removalListener; + private final CacheStats stats; ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); ReleasableLock readLock = new ReleasableLock(readWriteLock.readLock()); ReleasableLock writeLock = new ReleasableLock(readWriteLock.writeLock()); @@ -55,54 +63,53 @@ public class TieredSpilloverCache implements ICache, StoreAwareCache /** * Maintains caching tiers in ascending order of cache latency. */ - private final List> cacheList; + private final List> cacheList; TieredSpilloverCache(Builder builder) { Objects.requireNonNull(builder.onHeapCacheBuilder, "onHeap cache builder can't be null"); - this.onHeapCache = builder.onHeapCacheBuilder.setEventListener(this).build(); + this.onHeapRemovalListener = new TierRemovalListener<>(); + this.onDiskRemovalListener = new TierRemovalListener<>(); + this.onHeapCache = builder.onHeapCacheBuilder.setRemovalListener(onHeapRemovalListener).build(); if (builder.onDiskCacheBuilder != null) { - this.onDiskCache = Optional.of(builder.onDiskCacheBuilder.setEventListener(this).build()); + this.onDiskCache = Optional.of(builder.onDiskCacheBuilder.setRemovalListener(onDiskRemovalListener).build()); } else { this.onDiskCache = Optional.empty(); } - this.listener = builder.listener; + this.removalListener = builder.removalListener; this.cacheList = this.onDiskCache.map(diskTier -> Arrays.asList(this.onHeapCache, diskTier)).orElse(List.of(this.onHeapCache)); + this.stats = new 
TieredSpilloverCacheStats(onHeapCache.stats(), onDiskCache.get().stats()); } // Package private for testing - StoreAwareCache getOnHeapCache() { + ICache getOnHeapCache() { return onHeapCache; } // Package private for testing - Optional> getOnDiskCache() { + Optional> getOnDiskCache() { return onDiskCache; } @Override - public V get(K key) { - StoreAwareCacheValue cacheValue = getValueFromTieredCache(true).apply(key); - if (cacheValue == null) { - return null; - } - return cacheValue.getValue(); + public V get(ICacheKey key) { + V cacheValue = getValueFromTieredCache(true).apply(key); + return cacheValue; } @Override - public void put(K key, V value) { + public void put(ICacheKey key, V value) { try (ReleasableLock ignore = writeLock.acquire()) { onHeapCache.put(key, value); - listener.onCached(key, value, CacheStoreType.ON_HEAP); } } @Override - public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { + public V computeIfAbsent(ICacheKey key, LoadAwareCacheLoader, V> loader) throws Exception { // We are skipping calling event listeners at this step as we do another get inside below computeIfAbsent. // Where we might end up calling onMiss twice for a key not present in onHeap cache. // Similary we might end up calling both onMiss and onHit for a key, in case we are receiving concurrent // requests for the same key which requires loading only once. - StoreAwareCacheValue cacheValue = getValueFromTieredCache(false).apply(key); + V cacheValue = getValueFromTieredCache(false).apply(key); if (cacheValue == null) { // Add the value to the onHeap cache. We are calling computeIfAbsent which does another get inside. 
// This is needed as there can be many requests for the same key at the same time and we only want to load @@ -112,28 +119,28 @@ public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Except value = onHeapCache.computeIfAbsent(key, loader); } if (loader.isLoaded()) { - listener.onMiss(key, CacheStoreType.ON_HEAP); - onDiskCache.ifPresent(diskTier -> listener.onMiss(key, CacheStoreType.DISK)); - listener.onCached(key, value, CacheStoreType.ON_HEAP); + //listener.onMiss(key, CacheStoreType.ON_HEAP); + //onDiskCache.ifPresent(diskTier -> listener.onMiss(key, CacheStoreType.DISK)); + //listener.onCached(key, value, CacheStoreType.ON_HEAP); } else { - listener.onHit(key, value, CacheStoreType.ON_HEAP); + //listener.onHit(key, value, CacheStoreType.ON_HEAP); } return value; } - listener.onHit(key, cacheValue.getValue(), cacheValue.getCacheStoreType()); - if (cacheValue.getCacheStoreType().equals(CacheStoreType.DISK)) { - listener.onMiss(key, CacheStoreType.ON_HEAP); - } - return cacheValue.getValue(); + //listener.onHit(key, cacheValue.getValue(), cacheValue.getCacheStoreType()); + /*if (cacheValue.getCacheStoreType().equals(CacheStoreType.DISK)) { + //listener.onMiss(key, CacheStoreType.ON_HEAP); + }*/ + return cacheValue; //cacheValue.getValue(); } @Override - public void invalidate(K key) { + public void invalidate(ICacheKey key) { // We are trying to invalidate the key from all caches though it would be present in only of them. // Doing this as we don't know where it is located. We could do a get from both and check that, but what will // also trigger a hit/miss listener event, so ignoring it for now. 
try (ReleasableLock ignore = writeLock.acquire()) { - for (StoreAwareCache storeAwareCache : cacheList) { + for (ICache storeAwareCache : cacheList) { storeAwareCache.invalidate(key); } } @@ -142,7 +149,7 @@ public void invalidate(K key) { @Override public void invalidateAll() { try (ReleasableLock ignore = writeLock.acquire()) { - for (StoreAwareCache storeAwareCache : cacheList) { + for (ICache storeAwareCache : cacheList) { storeAwareCache.invalidateAll(); } } @@ -153,8 +160,8 @@ public void invalidateAll() { * @return An iterable over (onHeap + disk) keys */ @Override - public Iterable keys() { - Iterable onDiskKeysIterable; + public Iterable> keys() { + Iterable> onDiskKeysIterable; if (onDiskCache.isPresent()) { onDiskKeysIterable = onDiskCache.get().keys(); } else { @@ -171,30 +178,25 @@ public long count() { @Override public void refresh() { try (ReleasableLock ignore = writeLock.acquire()) { - for (StoreAwareCache storeAwareCache : cacheList) { - storeAwareCache.refresh(); + for (ICache cache : cacheList) { + cache.refresh(); } } } @Override public void close() throws IOException { - for (StoreAwareCache storeAwareCache : cacheList) { - storeAwareCache.close(); + for (ICache cache : cacheList) { + cache.close(); } } @Override public CacheStats stats() { - return null; + return stats; } - @Override - public void onMiss(K key, CacheStoreType cacheStoreType) { - // Misses for tiered cache are tracked here itself. - } - - @Override + /*@Override public void onRemoval(StoreAwareCacheRemovalNotification notification) { if (RemovalReason.EVICTED.equals(notification.getRemovalReason()) || RemovalReason.CAPACITY.equals(notification.getRemovalReason())) { @@ -212,31 +214,22 @@ public void onRemoval(StoreAwareCacheRemovalNotification notification) { } } listener.onRemoval(notification); - } - - @Override - public void onHit(K key, V value, CacheStoreType cacheStoreType) { - // Hits for tiered cache are tracked here itself. 
- } - - @Override - public void onCached(K key, V value, CacheStoreType cacheStoreType) { - // onCached events for tiered cache are tracked here itself. - } + }*/ - private Function> getValueFromTieredCache(boolean triggerEventListener) { + private Function, V> getValueFromTieredCache(boolean triggerEventListener) { return key -> { try (ReleasableLock ignore = readLock.acquire()) { - for (StoreAwareCache storeAwareCache : cacheList) { - V value = storeAwareCache.get(key); + for (ICache cache : cacheList) { + V value = cache.get(key); if (value != null) { if (triggerEventListener) { - listener.onHit(key, value, storeAwareCache.getTierType()); + //listener.onHit(key, value, cache.getTierType()); } - return new StoreAwareCacheValue<>(value, storeAwareCache.getTierType()); + //return new StoreAwareCacheValue<>(value, cache.getTierType()); + return value; } else { if (triggerEventListener) { - listener.onMiss(key, storeAwareCache.getTierType()); + //listener.onMiss(key, cache.getTierType()); } } } @@ -245,20 +238,14 @@ private Function> getValueFromTieredCache(boolean tri }; } - /** - * Stats for tiered spillover cache. - */ - /*class TieredSpillOverCacheStats implements CacheStats { + // A class which receives removal events from a tier present in the spillover cache. + private class TierRemovalListener implements RemovalListener, V> { @Override - public long count() { - long totalCount = 0; - for (StoreAwareCache storeAwareCache : cacheList) { - totalCount += storeAwareCache.count(); - } - return totalCount; + public void onRemoval(RemovalNotification, V> notification) { + // TODO } - }*/ + } /** * Builder object for tiered spillover cache. 
@@ -266,24 +253,24 @@ public long count() { * @param Type of value */ public static class Builder { - private StoreAwareCacheBuilder onHeapCacheBuilder; - private StoreAwareCacheBuilder onDiskCacheBuilder; - private StoreAwareCacheEventListener listener; + private ICacheBuilder onHeapCacheBuilder; + private ICacheBuilder onDiskCacheBuilder; + private RemovalListener, V> removalListener; public Builder() {} - public Builder setOnHeapCacheBuilder(StoreAwareCacheBuilder onHeapCacheBuilder) { + public Builder setOnHeapCacheBuilder(ICacheBuilder onHeapCacheBuilder) { this.onHeapCacheBuilder = onHeapCacheBuilder; return this; } - public Builder setOnDiskCacheBuilder(StoreAwareCacheBuilder onDiskCacheBuilder) { + public Builder setOnDiskCacheBuilder(ICacheBuilder onDiskCacheBuilder) { this.onDiskCacheBuilder = onDiskCacheBuilder; return this; } - public Builder setListener(StoreAwareCacheEventListener listener) { - this.listener = listener; + public Builder setRemovalListener(RemovalListener, V> listener) { + this.removalListener = listener; return this; } diff --git a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java new file mode 100644 index 0000000000000..6d1a5c982ea30 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java @@ -0,0 +1,112 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.tier; + +import org.opensearch.common.cache.stats.CacheStats; +import org.opensearch.common.cache.stats.CacheStatsDimension; +import org.opensearch.core.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.List; + +public class TieredSpilloverCacheStats implements CacheStats { + private final CacheStats heapStats; + private final CacheStats diskStats; + + public TieredSpilloverCacheStats(CacheStats heapStats, CacheStats diskStats) { + this.heapStats = heapStats; + this.diskStats = diskStats; + } + + // TODO: This is a skeleton implementation, not yet functional! + @Override + public void writeTo(StreamOutput out) throws IOException { + + } + + @Override + public long getTotalHits() { + return 0; + } + + @Override + public long getTotalMisses() { + return 0; + } + + @Override + public long getTotalEvictions() { + return 0; + } + + @Override + public long getTotalMemorySize() { + return 0; + } + + @Override + public long getTotalEntries() { + return 0; + } + + @Override + public long getHitsByDimension(CacheStatsDimension dimension) { + return 0; + } + + @Override + public long getMissesByDimension(CacheStatsDimension dimension) { + return 0; + } + + @Override + public long getEvictionsByDimension(CacheStatsDimension dimension) { + return 0; + } + + @Override + public long getMemorySizeByDimension(CacheStatsDimension dimension) { + return 0; + } + + @Override + public long getEntriesByDimension(CacheStatsDimension dimension) { + return 0; + } + + @Override + public void incrementHitsByDimensions(List dimensions) { + + } + + @Override + public void incrementMissesByDimensions(List dimensions) { + + } + + @Override + public void incrementEvictionsByDimensions(List dimensions) { + + } + + @Override + public void incrementMemorySizeByDimensions(List dimensions, long amountBytes) { + + } + + @Override + public void incrementEntriesByDimensions(List dimensions) { + + } + + @Override + public void 
decrementEntriesByDimensions(List dimensions) { + + } +} diff --git a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java index 057e7c9f66ce5..f7fa1859b5469 100644 --- a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java +++ b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java @@ -120,6 +120,12 @@ private StatsAndExpectedResults getPopulatedStats() { stats.incrementEntriesByDimensions(dimensions); expectedEntries.put(shardIdString, expectedEntries.get(shardIdString) + 1); } + + int numEntryDecrements = rand.nextInt(numEntryIncrements); + for (int i = 0; i < numEntryDecrements; i++) { + stats.decrementEntriesByDimensions(dimensions); + expectedEntries.put(shardIdString, expectedEntries.get(shardIdString) - 1); + } } Map> expectedShardResults = new HashMap<>(); expectedShardResults.put("hits", expectedHits); diff --git a/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java b/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java index cce0449dc88b8..4e74f5ea09ccc 100644 --- a/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java +++ b/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java @@ -14,7 +14,7 @@ import org.opensearch.common.cache.store.OpenSearchOnHeapCache; import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; -import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; +import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; import org.opensearch.common.metrics.CounterMetric; @@ 
-82,11 +82,11 @@ public void testComputeIfAbsentWithEvictionsFromOnHeapCache() throws Exception { int diskCacheSize = randomIntBetween(60, 100); int totalSize = onHeapCacheSize + diskCacheSize; MockCacheEventListener eventListener = new MockCacheEventListener(); - StoreAwareCacheBuilder cacheBuilder = new OpenSearchOnHeapCache.Builder().setMaximumWeightInBytes( + ICacheBuilder cacheBuilder = new OpenSearchOnHeapCache.Builder().setMaximumWeightInBytes( onHeapCacheSize * 50 ).setWeigher((k, v) -> 50); // Will support onHeapCacheSize entries. - StoreAwareCacheBuilder diskCacheBuilder = new MockOnDiskCache.Builder().setMaxSize(diskCacheSize) + ICacheBuilder diskCacheBuilder = new MockOnDiskCache.Builder().setMaxSize(diskCacheSize) .setDeliberateDelay(0); TieredSpilloverCache tieredSpilloverCache = new TieredSpilloverCache.Builder() @@ -224,7 +224,7 @@ public void testWithDiskTierNull() throws Exception { int onHeapCacheSize = randomIntBetween(10, 30); MockCacheEventListener eventListener = new MockCacheEventListener(); - StoreAwareCacheBuilder onHeapCacheBuilder = new OpenSearchOnHeapCache.Builder() + ICacheBuilder onHeapCacheBuilder = new OpenSearchOnHeapCache.Builder() .setMaximumWeightInBytes(onHeapCacheSize * 20) .setWeigher((k, v) -> 20); // Will support upto onHeapCacheSize entries TieredSpilloverCache tieredSpilloverCache = new TieredSpilloverCache.Builder() @@ -528,11 +528,11 @@ public void testConcurrencyForEvictionFlow() throws Exception { MockCacheEventListener eventListener = new MockCacheEventListener<>(); - StoreAwareCacheBuilder cacheBuilder = new OpenSearchOnHeapCache.Builder().setMaximumWeightInBytes( + ICacheBuilder cacheBuilder = new OpenSearchOnHeapCache.Builder().setMaximumWeightInBytes( 200 ).setWeigher((k, v) -> 150); - StoreAwareCacheBuilder diskCacheBuilder = new MockOnDiskCache.Builder().setMaxSize(diskCacheSize) + ICacheBuilder diskCacheBuilder = new MockOnDiskCache.Builder().setMaxSize(diskCacheSize) .setDeliberateDelay(500); 
TieredSpilloverCache tieredSpilloverCache = new TieredSpilloverCache.Builder() @@ -662,9 +662,9 @@ private TieredSpilloverCache intializeTieredSpilloverCache( StoreAwareCacheEventListener eventListener, long diskDeliberateDelay ) { - StoreAwareCacheBuilder diskCacheBuilder = new MockOnDiskCache.Builder().setMaxSize(diksCacheSize) + ICacheBuilder diskCacheBuilder = new MockOnDiskCache.Builder().setMaxSize(diksCacheSize) .setDeliberateDelay(diskDeliberateDelay); - StoreAwareCacheBuilder onHeapCacheBuilder = new OpenSearchOnHeapCache.Builder() + ICacheBuilder onHeapCacheBuilder = new OpenSearchOnHeapCache.Builder() .setMaximumWeightInBytes(onHeapCacheSize * 20) .setWeigher((k, v) -> 20); // Will support upto onHeapCacheSize entries return new TieredSpilloverCache.Builder().setOnHeapCacheBuilder(onHeapCacheBuilder) @@ -774,7 +774,7 @@ public CacheStoreType getTierType() { return CacheStoreType.DISK; } - public static class Builder extends StoreAwareCacheBuilder { + public static class Builder extends ICacheBuilder { int maxSize; long delay; From 21b1078fbb9a3fd930a898103f61aa529d547408 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Tue, 9 Jan 2024 14:33:48 -0800 Subject: [PATCH 03/32] Adds Serializer interface and impls for Key and BytesReference Signed-off-by: Peter Alfonsi --- .../cache/tier/BytesReferenceSerializer.java | 42 ++++++++++++ .../common/cache/tier/Serializer.java | 37 ++++++++++ .../indices/IRCKeyWriteableSerializer.java | 63 +++++++++++++++++ .../tier/BytesReferenceSerializerTests.java | 67 +++++++++++++++++++ .../IRCKeyWriteableSerializerTests.java | 56 ++++++++++++++++ 5 files changed, 265 insertions(+) create mode 100644 server/src/main/java/org/opensearch/common/cache/tier/BytesReferenceSerializer.java create mode 100644 server/src/main/java/org/opensearch/common/cache/tier/Serializer.java create mode 100644 server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java create mode 100644 
server/src/test/java/org/opensearch/common/cache/tier/BytesReferenceSerializerTests.java create mode 100644 server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java diff --git a/server/src/main/java/org/opensearch/common/cache/tier/BytesReferenceSerializer.java b/server/src/main/java/org/opensearch/common/cache/tier/BytesReferenceSerializer.java new file mode 100644 index 0000000000000..3ac30b09bddca --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/tier/BytesReferenceSerializer.java @@ -0,0 +1,42 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.tier; + +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesReference; + +import java.util.Arrays; + +/** + * A serializer which transforms BytesReference to byte[]. + * The type of BytesReference is NOT preserved after deserialization, but nothing in opensearch should care. + */ +public class BytesReferenceSerializer implements Serializer { + // This class does not get passed to ehcache itself, so it's not required that classes match after deserialization. 
+ + public BytesReferenceSerializer() {} + + @Override + public byte[] serialize(BytesReference object) { + return BytesReference.toBytes(object); + } + + @Override + public BytesReference deserialize(byte[] bytes) { + if (bytes == null) { + return null; + } + return new BytesArray(bytes); + } + + @Override + public boolean equals(BytesReference object, byte[] bytes) { + return Arrays.equals(serialize(object), bytes); + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/tier/Serializer.java b/server/src/main/java/org/opensearch/common/cache/tier/Serializer.java new file mode 100644 index 0000000000000..04413fce4b8b0 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/tier/Serializer.java @@ -0,0 +1,37 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.tier; + +/** + * An interface for serializers, to be used in disk caching tier and elsewhere. + * T is the class of the original object, and U is the serialized class. + */ +public interface Serializer { + /** + * Serializes an object. + * @param object A non-serialized object. + * @return The serialized representation of the object. + */ + U serialize(T object); + + /** + * Deserializes bytes into an object. + * @param bytes The serialized representation. + * @return The original object. + */ + T deserialize(U bytes); + + /** + * Compares an object to a serialized representation of an object. 
+ * @param object A non-serialized object + * @param bytes Serialized representation of an object + * @return true if representing the same object, false if not + */ + boolean equals(T object, U bytes); +} diff --git a/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java b/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java new file mode 100644 index 0000000000000..2a288c47981c5 --- /dev/null +++ b/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java @@ -0,0 +1,63 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.indices; + +import org.opensearch.OpenSearchException; +import org.opensearch.common.cache.tier.Serializer; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.BytesStreamInput; + +import java.io.IOException; +import java.util.Arrays; + +/** + * This class serializes the IndicesRequestCache.Key using its writeTo method.
+ */ +public class IRCKeyWriteableSerializer implements Serializer { + + + public IRCKeyWriteableSerializer() { + } + + @Override + public byte[] serialize(IndicesRequestCache.Key object) { + try { + BytesStreamOutput os = new BytesStreamOutput(); + object.writeTo(os); + return BytesReference.toBytes(os.bytes()); + } catch (IOException e) { + throw new OpenSearchException(e); + } + } + + @Override + public IndicesRequestCache.Key deserialize(byte[] bytes) { + if (bytes == null) { + return null; + } + try { + BytesStreamInput is = new BytesStreamInput(bytes, 0, bytes.length); + return new IndicesRequestCache.Key(is); + } catch (IOException e) { + throw new OpenSearchException(e); + } + } + + @Override + public boolean equals(IndicesRequestCache.Key object, byte[] bytes) { + // Deserialization is much slower than serialization for keys of order 1 KB, + // while time to serialize is fairly constant (per byte) + if (bytes.length < 5000) { + return Arrays.equals(serialize(object), bytes); + } else { + return object.equals(deserialize(bytes)); + } + } +} diff --git a/server/src/test/java/org/opensearch/common/cache/tier/BytesReferenceSerializerTests.java b/server/src/test/java/org/opensearch/common/cache/tier/BytesReferenceSerializerTests.java new file mode 100644 index 0000000000000..af81f04149ae6 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/tier/BytesReferenceSerializerTests.java @@ -0,0 +1,67 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.tier; + +import org.opensearch.common.Randomness; +import org.opensearch.common.bytes.ReleasableBytesReference; +import org.opensearch.common.util.BigArrays; +import org.opensearch.common.util.PageCacheRecycler; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.bytes.CompositeBytesReference; +import org.opensearch.core.common.util.ByteArray; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.Random; + +public class BytesReferenceSerializerTests extends OpenSearchTestCase { + public void testEquality() throws Exception { + BytesReferenceSerializer ser = new BytesReferenceSerializer(); + // Test that values are equal before and after serialization, for each implementation of BytesReference. + byte[] bytesValue = new byte[1000]; + Random rand = Randomness.get(); + rand.nextBytes(bytesValue); + + BytesReference ba = new BytesArray(bytesValue); + byte[] serialized = ser.serialize(ba); + assertTrue(ser.equals(ba, serialized)); + BytesReference deserialized = ser.deserialize(serialized); + assertEquals(ba, deserialized); + + ba = new BytesArray(new byte[] {}); + serialized = ser.serialize(ba); + assertTrue(ser.equals(ba, serialized)); + deserialized = ser.deserialize(serialized); + assertEquals(ba, deserialized); + + BytesReference cbr = CompositeBytesReference.of(new BytesArray(bytesValue), new BytesArray(bytesValue)); + serialized = ser.serialize(cbr); + assertTrue(ser.equals(cbr, serialized)); + deserialized = ser.deserialize(serialized); + assertEquals(cbr, deserialized); + + // We need the PagedBytesReference to be larger than the page size (16 KB) in order to actually create it + byte[] pbrValue = new byte[PageCacheRecycler.PAGE_SIZE_IN_BYTES * 2]; + rand.nextBytes(pbrValue); + ByteArray arr = BigArrays.NON_RECYCLING_INSTANCE.newByteArray(pbrValue.length); + arr.set(0L, pbrValue, 0, pbrValue.length); + assert 
!arr.hasArray(); + BytesReference pbr = BytesReference.fromByteArray(arr, pbrValue.length); + serialized = ser.serialize(pbr); + assertTrue(ser.equals(pbr, serialized)); + deserialized = ser.deserialize(serialized); + assertEquals(pbr, deserialized); + + BytesReference rbr = new ReleasableBytesReference(new BytesArray(bytesValue), ReleasableBytesReference.NO_OP); + serialized = ser.serialize(rbr); + assertTrue(ser.equals(rbr, serialized)); + deserialized = ser.deserialize(serialized); + assertEquals(rbr, deserialized); + } +} diff --git a/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java b/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java new file mode 100644 index 0000000000000..61484db6a51d5 --- /dev/null +++ b/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java @@ -0,0 +1,56 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ +package org.opensearch.indices; + +import org.opensearch.common.Randomness; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.index.IndexService; +import org.opensearch.index.shard.IndexShard; +import org.opensearch.test.OpenSearchSingleNodeTestCase; + +import java.util.Random; +import java.util.UUID; + +public class IRCKeyWriteableSerializerTests extends OpenSearchSingleNodeTestCase { + + public void testSerializer() throws Exception { + IndexService indexService = createIndex("test"); + IndexShard indexShard = indexService.getShardOrNull(0); + IRCKeyWriteableSerializer ser = new IRCKeyWriteableSerializer(); + + int NUM_KEYS = 1000; + int[] valueLengths = new int[] { 1000, 6000 }; // test both branches in equals() + Random rand = Randomness.get(); + for (int valueLength : valueLengths) { + for (int i = 0; i < NUM_KEYS; i++) { + IndicesRequestCache.Key key = getRandomIRCKey(valueLength, rand, indexShard.shardId()); + byte[] serialized = ser.serialize(key); + assertTrue(ser.equals(key, serialized)); + IndicesRequestCache.Key deserialized = ser.deserialize(serialized); + assertTrue(key.equals(deserialized)); + } + } + } + + private IndicesRequestCache.Key getRandomIRCKey( + int valueLength, + Random random, + ShardId shard + ) { + byte[] value = new byte[valueLength]; + for (int i = 0; i < valueLength; i++) { + value[i] = (byte) (random.nextInt(126 - 32) + 32); + } + BytesReference keyValue = new BytesArray(value); + return new IndicesRequestCache.Key(shard, keyValue, UUID.randomUUID().toString()); // same UUID source as used in real key + } +} From a460acde930677315893229e77b718720dd0691e Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Thu, 8 Feb 2024 17:16:14 -0800 Subject: [PATCH 04/32] Adds ICacheKey serializer impl 
Signed-off-by: Peter Alfonsi --- .../cache/store/disk/EhcacheDiskCache.java | 72 +++++++++++++++--- .../cache/stats/CacheStatsDimension.java | 37 ++++++++- .../common/cache/stats/ICacheKey.java | 19 ++++- .../cache/tier/ICacheKeySerializer.java | 75 +++++++++++++++++++ .../cache/stats/CacheStatsDimensionTests.java | 41 ++++++++++ .../cache/tier/ICacheKeySerializerTests.java | 50 +++++++++++++ .../cache/tier/TieredSpilloverCacheTests.java | 13 ++-- 7 files changed, 287 insertions(+), 20 deletions(-) create mode 100644 server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java create mode 100644 server/src/test/java/org/opensearch/common/cache/stats/CacheStatsDimensionTests.java create mode 100644 server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 7b5ee34f7772e..2a0db42672349 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -10,6 +10,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.ehcache.core.spi.service.FileBasedPersistenceContext; +import org.ehcache.spi.serialization.SerializerException; import org.opensearch.OpenSearchException; import org.opensearch.cache.EhcacheSettings; import org.opensearch.common.SuppressForbidden; @@ -25,11 +27,13 @@ import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.cache.tier.Serializer; import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; import 
org.opensearch.common.unit.TimeValue; import java.io.File; +import java.nio.ByteBuffer; import java.time.Duration; import java.util.Iterator; import java.util.Map; @@ -87,7 +91,7 @@ public class EhcacheDiskCache implements ICache { private final PersistentCacheManager cacheManager; // Disk cache - private Cache cache; + private Cache cache; private final long maxWeightInBytes; private final String storagePath; private final Class keyType; @@ -101,6 +105,11 @@ public class EhcacheDiskCache implements ICache { private final String diskCacheAlias; private final String shardIdDimensionName; + private final Serializer keySerializer; + private final Serializer valueSerializer; + + + /** * Used in computeIfAbsent to synchronize loading of a given key. This is needed as ehcache doesn't provide a * computeIfAbsent method. @@ -157,6 +166,8 @@ private EhcacheDiskCache(Builder builder) { this.threadPoolAlias = builder.threadPoolAlias; } this.settings = Objects.requireNonNull(builder.getSettings(), "Settings objects shouldn't be null"); + this.keySerializer = Objects.requireNonNull(builder.keySerializer, "Key serializer shouldn't be null"); + this.valueSerializer = Objects.requireNonNull(builder.valueSerializer, "Value serializer shouldn't be null"); this.cacheManager = buildCacheManager(); this.ehCacheEventListener = new EhCacheEventListener(Objects.requireNonNull(builder.getRemovalListener(), "Removal listener can't be null")); this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); @@ -164,27 +175,27 @@ private EhcacheDiskCache(Builder builder) { this.stats = new SingleDimensionCacheStats(shardIdDimensionName); } - private Cache buildCache(Duration expireAfterAccess, Builder builder) { + private Cache buildCache(Duration expireAfterAccess, Builder builder) { try { return this.cacheManager.createCache( this.diskCacheAlias, CacheConfigurationBuilder.newCacheConfigurationBuilder( ICacheKey.class, - this.valueType, + byte[].class, 
ResourcePoolsBuilder.newResourcePoolsBuilder().disk(maxWeightInBytes, MemoryUnit.B) ).withExpiry(new ExpiryPolicy<>() { @Override - public Duration getExpiryForCreation(ICacheKey key, V value) { + public Duration getExpiryForCreation(ICacheKey key, byte[] value) { return INFINITE; } @Override - public Duration getExpiryForAccess(ICacheKey key, Supplier value) { + public Duration getExpiryForAccess(ICacheKey key, Supplier value) { return expireAfterAccess; } @Override - public Duration getExpiryForUpdate(ICacheKey key, Supplier oldValue, V newValue) { + public Duration getExpiryForUpdate(ICacheKey key, Supplier oldValue, byte[] newValue) { return INFINITE; } }) @@ -200,6 +211,7 @@ public Duration getExpiryForUpdate(ICacheKey key, Supplier oldValue .get(settings) ) ) + .withKeySerializer(new KeySerializerWrapper(keySerializer)) ); } catch (IllegalArgumentException ex) { logger.error("Ehcache disk cache initialization failed due to illegal argument: {}", ex.getMessage()); @@ -262,7 +274,7 @@ public V get(ICacheKey key) { } V value; try { - value = cache.get(key); + value = valueSerializer.deserialize(cache.get(key)); } catch (CacheLoadingException ex) { throw new OpenSearchException("Exception occurred while trying to fetch item from ehcache disk cache"); } @@ -282,7 +294,7 @@ public V get(ICacheKey key) { @Override public void put(ICacheKey key, V value) { try { - cache.put(key, value); + cache.put(key, valueSerializer.serialize(value)); } catch (CacheWritingException ex) { throw new OpenSearchException("Exception occurred while put item to ehcache disk cache"); } @@ -300,7 +312,7 @@ public V computeIfAbsent(ICacheKey key, LoadAwareCacheLoader, V> // Ehcache doesn't provide any computeIfAbsent function. Exposes putIfAbsent but that works differently and is // not performant in case there are multiple concurrent request for same key. Below is our own custom // implementation of computeIfAbsent on top of ehcache. Inspired by OpenSearch Cache implementation. 
- V value = cache.get(key); + V value = valueSerializer.deserialize(cache.get(key)); if (value == null) { value = compute(key, loader); } @@ -325,7 +337,7 @@ private V compute(ICacheKey key, LoadAwareCacheLoader, V> loader BiFunction, V>, Throwable, V> handler = (pair, ex) -> { V value = null; if (pair != null) { - cache.put(pair.v1(), pair.v2()); + cache.put(pair.v1(), valueSerializer.serialize(pair.v2())); value = pair.v2(); // Returning a value itself assuming that a next get should return the same. Should // be safe to assume if we got no exception and reached here. } @@ -434,9 +446,9 @@ public CacheStats stats() { */ class EhCacheKeyIterator implements Iterator> { - Iterator> iterator; + Iterator> iterator; - EhCacheKeyIterator(Iterator> iterator) { + EhCacheKeyIterator(Iterator> iterator) { this.iterator = iterator; } @@ -498,6 +510,30 @@ public void onEvent(CacheEvent, ? extends V> event) { } } + private class KeySerializerWrapper implements org.ehcache.spi.serialization.Serializer> { + + + + // This constructor must be present, but does not have to work as we are not actually persisting the disk + // cache after a restart. + // See https://www.ehcache.org/documentation/3.0/serializers-copiers.html#persistent-vs-transient-caches + public KeySerializerWrapper(ClassLoader classLoader, FileBasedPersistenceContext persistenceContext) {} + @Override + public ByteBuffer serialize(ICacheKey object) throws SerializerException { + return null; + } + + @Override + public ICacheKey read(ByteBuffer binary) throws ClassNotFoundException, SerializerException { + return null; + } + + @Override + public boolean equals(ICacheKey object, ByteBuffer binary) throws ClassNotFoundException, SerializerException { + return false; + } + } + /** * Factory to create an ehcache disk cache. 
*/ @@ -560,6 +596,8 @@ public static class Builder extends ICacheBuilder { private Class valueType; private String shardIdDimensionName; + private Serializer keySerializer; + private Serializer valueSerializer; /** * Default constructor. Added to fix javadocs. @@ -641,6 +679,16 @@ public Builder setShardIdDimensionName(String dimensionName) { return this; } + public Builder setKeySerializer(Serializer keySerializer) { + this.keySerializer = keySerializer; + return this; + } + + public Builder setValueSerializer(Serializer valueSerializer) { + this.valueSerializer = valueSerializer; + return this; + } + //@Override public EhcacheDiskCache build() { return new EhcacheDiskCache<>(this); diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java index 83e022325e4bb..e66676599957a 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java @@ -8,11 +8,46 @@ package org.opensearch.common.cache.stats; -public class CacheStatsDimension { +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; + +import java.io.IOException; + +public class CacheStatsDimension implements Writeable { public final String dimensionName; public final String dimensionValue; public CacheStatsDimension(String dimensionName, String dimensionValue) { this.dimensionName = dimensionName; this.dimensionValue = dimensionValue; } + + public CacheStatsDimension(StreamInput in) throws IOException { + this.dimensionName = in.readString(); + this.dimensionValue = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(dimensionName); + out.writeString(dimensionValue); + } + + @Override + public boolean 
equals(Object o) { + if (o == this) { + return true; + } + if (o == null) { + return false; + } + if (o.getClass() != CacheStatsDimension.class) { + return false; + } + CacheStatsDimension other = (CacheStatsDimension) o; + if (other.dimensionName == null || other.dimensionValue == null) { + return false; + } + return other.dimensionName.equals(dimensionName) && other.dimensionValue.equals(dimensionValue); + } } diff --git a/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java b/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java index bcbc30f84f899..4c0e86b1ec901 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java @@ -8,14 +8,31 @@ package org.opensearch.common.cache.stats; +import org.opensearch.common.cache.tier.ICacheKeySerializer; + import java.util.List; public class ICacheKey { - public final K key; + public final K key; // K must implement equals() public final List dimensions; public ICacheKey(K key, List dimensions) { this.key = key; this.dimensions = dimensions; } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null) { + return false; + } + if (o.getClass() != ICacheKey.class) { + return false; + } + ICacheKey other = (ICacheKey) o; + return key.equals(other.key) && dimensions.equals(other.dimensions); + } } diff --git a/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java b/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java new file mode 100644 index 0000000000000..855e9031f7739 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java @@ -0,0 +1,75 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.tier; + +import org.opensearch.OpenSearchException; +import org.opensearch.common.cache.stats.CacheStatsDimension; +import org.opensearch.common.cache.stats.ICacheKey; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.BytesStreamInput; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class ICacheKeySerializer implements Serializer, byte[]> { + + public Serializer keySerializer; + + public ICacheKeySerializer(Serializer serializer) { + this.keySerializer = serializer; + } + + @Override + public byte[] serialize(ICacheKey object) { + byte[] serializedKey = keySerializer.serialize(object.key); + try { + BytesStreamOutput os = new BytesStreamOutput(); + // First write the number of dimensions + os.writeInt(object.dimensions.size()); + for (CacheStatsDimension dim : object.dimensions) { + dim.writeTo(os); + } + os.writeVInt(serializedKey.length); // ?? Is the read byte[] fn broken such that we have to do this? + os.writeBytes(serializedKey); // TODO: Is this re-copying unnecessarily? Come back to this + return BytesReference.toBytes(os.bytes()); + } catch (IOException e) { + throw new OpenSearchException(e); + } + } + + @Override + public ICacheKey deserialize(byte[] bytes) { + if (bytes == null) { + return null; + } + List dimensionList = new ArrayList<>(); + try { + BytesStreamInput is = new BytesStreamInput(bytes, 0, bytes.length); + int numDimensions = is.readInt(); + for (int i = 0; i < numDimensions; i++) { + dimensionList.add(new CacheStatsDimension(is)); + } + int length = is.readVInt(); + + byte[] serializedKey = new byte[length]; + is.readBytes(serializedKey, 0, length); // not sure why is.readByteArray doesn't work?? 
+ return new ICacheKey<>(keySerializer.deserialize(serializedKey), dimensionList); + } catch (IOException e) { + throw new OpenSearchException(e); + } + } + + @Override + public boolean equals(ICacheKey object, byte[] bytes) { + return Arrays.equals(serialize(object), bytes); + } +} diff --git a/server/src/test/java/org/opensearch/common/cache/stats/CacheStatsDimensionTests.java b/server/src/test/java/org/opensearch/common/cache/stats/CacheStatsDimensionTests.java new file mode 100644 index 0000000000000..0af171191cdd4 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/stats/CacheStatsDimensionTests.java @@ -0,0 +1,41 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.stats; + +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.test.OpenSearchTestCase; + +public class CacheStatsDimensionTests extends OpenSearchTestCase { + public void testSerialization() throws Exception { + String name = "dimension_name"; + String value = "dimension_value"; + CacheStatsDimension dim = new CacheStatsDimension(name, value); + + BytesStreamOutput os = new BytesStreamOutput(); + dim.writeTo(os); + BytesStreamInput is = new BytesStreamInput(BytesReference.toBytes(os.bytes())); + CacheStatsDimension deserialized = new CacheStatsDimension(is); + + assertEquals(dim.dimensionName, deserialized.dimensionName); + assertEquals(dim.dimensionValue, deserialized.dimensionValue); + assertEquals(dim, deserialized); + } + + public void testEquality() throws Exception { + String name = "dimension_name"; + String value = "dimension_value"; + CacheStatsDimension dim = new CacheStatsDimension(name, value); + assertEquals(dim, new 
CacheStatsDimension(name, value)); + assertNotEquals(dim, new CacheStatsDimension("a", "b")); + assertNotEquals(dim, null); + assertNotEquals(dim, new CacheStatsDimension(null, null)); + } +} diff --git a/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java b/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java new file mode 100644 index 0000000000000..8dcbb39de3125 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java @@ -0,0 +1,50 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.tier; + +import org.opensearch.common.Randomness; +import org.opensearch.common.cache.stats.CacheStatsDimension; +import org.opensearch.common.cache.stats.ICacheKey; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.List; +import java.util.Random; +import java.util.UUID; + +public class ICacheKeySerializerTests extends OpenSearchTestCase { + // For these tests, we use BytesReference as K, since we already have a Serializer implementation + public void testEquality() throws Exception { + BytesReferenceSerializer keySer = new BytesReferenceSerializer(); + ICacheKeySerializer serializer = new ICacheKeySerializer<>(keySer); + + int numDimensionsTested = 10; + for (int i = 0; i < numDimensionsTested; i++) { + CacheStatsDimension dim = getRandomDim(); + ICacheKey key = new ICacheKey<>(getRandomBytesReference(), List.of(dim)); + byte[] serialized = serializer.serialize(key); + assertTrue(serializer.equals(key, serialized)); + ICacheKey deserialized = serializer.deserialize(serialized); + assertEquals(key, deserialized); + } + } + + private CacheStatsDimension 
getRandomDim() { + return new CacheStatsDimension(UUID.randomUUID().toString(), UUID.randomUUID().toString()); + } + + private BytesReference getRandomBytesReference() { + byte[] bytesValue = new byte[1000]; + Random rand = Randomness.get(); + rand.nextBytes(bytesValue); + return new BytesArray(bytesValue); + } + +} diff --git a/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java b/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java index 4e74f5ea09ccc..158b7be2315f8 100644 --- a/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java +++ b/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java @@ -11,6 +11,7 @@ import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; +import org.opensearch.common.cache.stats.ICacheKey; import org.opensearch.common.cache.store.OpenSearchOnHeapCache; import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; @@ -34,7 +35,7 @@ public class TieredSpilloverCacheTests extends OpenSearchTestCase { - public void testComputeIfAbsentWithoutAnyOnHeapCacheEviction() throws Exception { + /*public void testComputeIfAbsentWithoutAnyOnHeapCacheEviction() throws Exception { int onHeapCacheSize = randomIntBetween(10, 30); MockCacheEventListener eventListener = new MockCacheEventListener(); TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( @@ -49,7 +50,7 @@ public void testComputeIfAbsentWithoutAnyOnHeapCacheEviction() throws Exception for (int iter = 0; iter < numOfItems1; iter++) { String key = UUID.randomUUID().toString(); keys.add(key); - LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader(); + LoadAwareCacheLoader, String> tieredCacheLoader = getLoadAwareCacheLoader(); 
tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader); } assertEquals(numOfItems1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count()); @@ -639,12 +640,12 @@ class TestStatsHolder { } } - private LoadAwareCacheLoader getLoadAwareCacheLoader() { - return new LoadAwareCacheLoader() { + private LoadAwareCacheLoader, String> getLoadAwareCacheLoader() { + return new LoadAwareCacheLoader, String>() { boolean isLoaded = false; @Override - public String load(String key) { + public String load(ICacheKey key) { isLoaded = true; return UUID.randomUUID().toString(); } @@ -793,5 +794,5 @@ public Builder setDeliberateDelay(long millis) { this.delay = millis; return this; } - } + }*/ } From 31ee9aedecde8ed999ff949d90d4c2ba6734dd2f Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Fri, 9 Feb 2024 09:56:16 -0800 Subject: [PATCH 05/32] Added more icachekey serializer tests Signed-off-by: Peter Alfonsi --- .../cache/tier/ICacheKeySerializer.java | 3 ++ .../cache/tier/ICacheKeySerializerTests.java | 30 ++++++++++++++++++- 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java b/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java index 855e9031f7739..95371b87c32a9 100644 --- a/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java +++ b/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java @@ -30,6 +30,9 @@ public ICacheKeySerializer(Serializer serializer) { @Override public byte[] serialize(ICacheKey object) { + if (object == null || object.key == null || object.dimensions == null) { + return null; + } byte[] serializedKey = keySerializer.serialize(object.key); try { BytesStreamOutput os = new BytesStreamOutput(); diff --git a/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java b/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java index 
8dcbb39de3125..5cc8e6167902a 100644 --- a/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java +++ b/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java @@ -15,6 +15,7 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.test.OpenSearchTestCase; +import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.UUID; @@ -25,7 +26,7 @@ public void testEquality() throws Exception { BytesReferenceSerializer keySer = new BytesReferenceSerializer(); ICacheKeySerializer serializer = new ICacheKeySerializer<>(keySer); - int numDimensionsTested = 10; + int numDimensionsTested = 100; for (int i = 0; i < numDimensionsTested; i++) { CacheStatsDimension dim = getRandomDim(); ICacheKey key = new ICacheKey<>(getRandomBytesReference(), List.of(dim)); @@ -36,6 +37,33 @@ public void testEquality() throws Exception { } } + public void testDimNumbers() throws Exception { + BytesReferenceSerializer keySer = new BytesReferenceSerializer(); + ICacheKeySerializer serializer = new ICacheKeySerializer<>(keySer); + + for (int numDims : new int[]{0, 5, 1000}) { + List dims = new ArrayList<>(); + for (int j = 0; j < numDims; j++) { + dims.add(getRandomDim()); + } + ICacheKey key = new ICacheKey<>(getRandomBytesReference(), dims); + byte[] serialized = serializer.serialize(key); + assertTrue(serializer.equals(key, serialized)); + ICacheKey deserialized = serializer.deserialize(serialized); + assertEquals(key, deserialized); + } + } + public void testNullInputs() throws Exception { + BytesReferenceSerializer keySer = new BytesReferenceSerializer(); + ICacheKeySerializer serializer = new ICacheKeySerializer<>(keySer); + + assertNull(serializer.deserialize(null)); + ICacheKey nullKey = new ICacheKey<>(null, List.of(getRandomDim())); + assertNull(serializer.serialize(nullKey)); + assertNull(serializer.serialize(null)); + assertNull(serializer.serialize(new 
ICacheKey<>(getRandomBytesReference(), null))); + } + private CacheStatsDimension getRandomDim() { return new CacheStatsDimension(UUID.randomUUID().toString(), UUID.randomUUID().toString()); } From 9069077a911fa7272bf32081cdcb8e31cde46a26 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Fri, 9 Feb 2024 13:49:42 -0800 Subject: [PATCH 06/32] Attempts to fix ehcache hits Signed-off-by: Peter Alfonsi --- .../cache/store/disk/EhcacheDiskCache.java | 25 +++++++--- .../store/disk/EhCacheDiskCacheTests.java | 50 +++++++++++++++++-- .../cache/tier/ICacheKeySerializer.java | 10 ++-- .../cache/tier/ICacheKeySerializerTests.java | 4 +- 4 files changed, 74 insertions(+), 15 deletions(-) diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 2a0db42672349..708cb2a1387b7 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -27,6 +27,7 @@ import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.cache.tier.ICacheKeySerializer; import org.opensearch.common.cache.tier.Serializer; import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; @@ -274,7 +275,8 @@ public V get(ICacheKey key) { } V value; try { - value = valueSerializer.deserialize(cache.get(key)); + byte[] serializedValue = cache.get(key); + value = valueSerializer.deserialize(serializedValue); } catch (CacheLoadingException ex) { throw new OpenSearchException("Exception occurred while trying to fetch item from ehcache disk cache"); } @@ -485,6 +487,7 @@ public void onEvent(CacheEvent, ? 
extends V> event) { switch (event.getType()) { case CREATED: stats.incrementEntriesByDimensions(event.getKey().dimensions); + // TODO: Add memory values in all of these cases! assert event.getOldValue() == null; break; case EVICTED: @@ -510,7 +513,11 @@ public void onEvent(CacheEvent, ? extends V> event) { } } - private class KeySerializerWrapper implements org.ehcache.spi.serialization.Serializer> { + private class KeySerializerWrapper implements org.ehcache.spi.serialization.Serializer { + private ICacheKeySerializer serializer; + public KeySerializerWrapper(Serializer internalKeySerializer) { + this.serializer = new ICacheKeySerializer<>(internalKeySerializer); + } @@ -519,18 +526,22 @@ private class KeySerializerWrapper implements org.ehcache.spi.serialization.S // See https://www.ehcache.org/documentation/3.0/serializers-copiers.html#persistent-vs-transient-caches public KeySerializerWrapper(ClassLoader classLoader, FileBasedPersistenceContext persistenceContext) {} @Override - public ByteBuffer serialize(ICacheKey object) throws SerializerException { - return null; + public ByteBuffer serialize(ICacheKey object) throws SerializerException { + return ByteBuffer.wrap(serializer.serialize(object)); } @Override public ICacheKey read(ByteBuffer binary) throws ClassNotFoundException, SerializerException { - return null; + byte[] arr = new byte[binary.remaining()]; + binary.get(arr); + return serializer.deserialize(arr); } @Override - public boolean equals(ICacheKey object, ByteBuffer binary) throws ClassNotFoundException, SerializerException { - return false; + public boolean equals(ICacheKey object, ByteBuffer binary) throws ClassNotFoundException, SerializerException { + byte[] arr = new byte[binary.remaining()]; + binary.get(arr); + return serializer.equals(object, arr); } } diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java 
b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 088b1d8eac2b6..25b9596c9b11b 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -18,12 +18,15 @@ import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.cache.tier.Serializer; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.env.NodeEnvironment; import org.opensearch.test.OpenSearchSingleNodeTestCase; import java.io.IOException; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -50,22 +53,38 @@ public void testBasicGetAndPut() throws IOException { .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) .setValueType(String.class) + .setKeySerializer(new StringSerializer()) + .setValueSerializer(new StringSerializer()) + .setShardIdDimensionName(dimensionName) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setRemovalListener(mockRemovalListener) .build(); - int randomKeys = randomIntBetween(10, 100); + int randomKeys = 1; //randomIntBetween(10, 100); Map keyValueMap = new HashMap<>(); for (int i = 0; i < randomKeys; i++) { - keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString()); + //keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString()); + keyValueMap.put("test_key", "test_value"); } for (Map.Entry entry : keyValueMap.entrySet()) { 
ehcacheTest.put(getICacheKey(entry.getKey()), entry.getValue()); + System.out.println("Current # entries = " + ehcacheTest.stats().getTotalEntries()); } for (Map.Entry entry : keyValueMap.entrySet()) { - String value = ehcacheTest.get(getICacheKey(entry.getKey())); + ICacheKey key = getICacheKey(entry.getKey()); + /*for (ICacheKey cacheKey : ehcacheTest.keys()) { + System.out.println("Key from cache:"); + System.out.println("Dimensions = " + cacheKey.dimensions); + System.out.println("K = " + cacheKey.key); + System.out.println("Key from map:"); + System.out.println("Dimensions = " + key.dimensions); + System.out.println("K = " + key.key); + System.out.println("Equality = " + cacheKey.equals(key)); + // Confirmed via these printouts that the keys are equal. So why is there a miss? + }*/ + String value = ehcacheTest.get(key); assertEquals(entry.getValue(), value); } //assertEquals(randomKeys, mockEventListener.onCachedCount.get()); @@ -541,4 +560,29 @@ public void onRemoval(RemovalNotification, V> notification) { onRemovalCount.incrementAndGet(); } } + + private static class StringSerializer implements Serializer { + private final Charset charset = StandardCharsets.UTF_8; + + public StringSerializer() { + int i = 0; // remove, for debug breakpoint + } + @Override + public byte[] serialize(String object) { + return object.getBytes(charset); + } + + @Override + public String deserialize(byte[] bytes) { + if (bytes == null) { + return null; + } + return new String(bytes, charset); + } + + @Override + public boolean equals(String object, byte[] bytes) { + return object.equals(deserialize(bytes)); + } + } } diff --git a/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java b/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java index 95371b87c32a9..0e0b83495ea49 100644 --- a/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java +++ 
b/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java @@ -8,6 +8,7 @@ package org.opensearch.common.cache.tier; +import org.apache.lucene.util.BytesRef; import org.opensearch.OpenSearchException; import org.opensearch.common.cache.stats.CacheStatsDimension; import org.opensearch.common.cache.stats.ICacheKey; @@ -37,13 +38,14 @@ public byte[] serialize(ICacheKey object) { try { BytesStreamOutput os = new BytesStreamOutput(); // First write the number of dimensions - os.writeInt(object.dimensions.size()); + os.writeVInt(object.dimensions.size()); for (CacheStatsDimension dim : object.dimensions) { dim.writeTo(os); } os.writeVInt(serializedKey.length); // ?? Is the read byte[] fn broken such that we have to do this? os.writeBytes(serializedKey); // TODO: Is this re-copying unnecessarily? Come back to this - return BytesReference.toBytes(os.bytes()); + byte[] finalBytes = BytesReference.toBytes(os.bytes()); + return finalBytes; } catch (IOException e) { throw new OpenSearchException(e); } @@ -57,12 +59,12 @@ public ICacheKey deserialize(byte[] bytes) { List dimensionList = new ArrayList<>(); try { BytesStreamInput is = new BytesStreamInput(bytes, 0, bytes.length); - int numDimensions = is.readInt(); + int numDimensions = is.readVInt(); for (int i = 0; i < numDimensions; i++) { dimensionList.add(new CacheStatsDimension(is)); } - int length = is.readVInt(); + int length = is.readVInt(); byte[] serializedKey = new byte[length]; is.readBytes(serializedKey, 0, length); // not sure why is.readByteArray doesn't work?? 
return new ICacheKey<>(keySerializer.deserialize(serializedKey), dimensionList); diff --git a/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java b/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java index 5cc8e6167902a..2dbf7b97716ab 100644 --- a/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java +++ b/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java @@ -15,6 +15,8 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.test.OpenSearchTestCase; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Random; @@ -34,6 +36,7 @@ public void testEquality() throws Exception { assertTrue(serializer.equals(key, serialized)); ICacheKey deserialized = serializer.deserialize(serialized); assertEquals(key, deserialized); + assertTrue(serializer.equals(deserialized, serialized)); } } @@ -74,5 +77,4 @@ private BytesReference getRandomBytesReference() { rand.nextBytes(bytesValue); return new BytesArray(bytesValue); } - } From 18ddb23765609623d29c0399216a889c049197b2 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Fri, 9 Feb 2024 14:07:10 -0800 Subject: [PATCH 07/32] Fixed unexpected misses by implementing hashCode for ICacheKey and CacheStatsDimension Signed-off-by: Peter Alfonsi --- .../cache/store/disk/EhcacheDiskCache.java | 29 +------------------ .../store/disk/EhCacheDiskCacheTests.java | 23 ++------------- .../cache/stats/CacheStatsDimension.java | 6 ++++ .../common/cache/stats/ICacheKey.java | 7 ++++- .../cache/tier/ICacheKeySerializerTests.java | 8 +++++ 5 files changed, 24 insertions(+), 49 deletions(-) diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 
708cb2a1387b7..742f3a3e82607 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -117,32 +117,6 @@ public class EhcacheDiskCache implements ICache { */ Map, CompletableFuture, V>>> completableFutureMap = new ConcurrentHashMap<>(); - // I think we need this to instantiate the cache. We can't pass in values like ICacheKey.class to builders - // due to type erasure. - private class EhcacheKeyWrapper { - private final ICacheKey key; - public EhcacheKeyWrapper(ICacheKey key) { - this.key = key; - } - ICacheKey getKey() { - return key; - } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null) { - return false; - } - if (o.getClass() != EhcacheKeyWrapper.class) { - return false; - } - EhcacheKeyWrapper other = (EhcacheKeyWrapper) o; - return other.getKey().equals(key); - } - } - private EhcacheDiskCache(Builder builder) { this.keyType = Objects.requireNonNull(builder.keyType, "Key type shouldn't be null"); this.valueType = Objects.requireNonNull(builder.valueType, "Value type shouldn't be null"); @@ -275,8 +249,7 @@ public V get(ICacheKey key) { } V value; try { - byte[] serializedValue = cache.get(key); - value = valueSerializer.deserialize(serializedValue); + value = valueSerializer.deserialize(cache.get(key)); } catch (CacheLoadingException ex) { throw new OpenSearchException("Exception occurred while trying to fetch item from ehcache disk cache"); } diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 25b9596c9b11b..690f90f49afe7 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ 
b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -62,29 +62,16 @@ public void testBasicGetAndPut() throws IOException { .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setRemovalListener(mockRemovalListener) .build(); - int randomKeys = 1; //randomIntBetween(10, 100); + int randomKeys = randomIntBetween(10, 100); Map keyValueMap = new HashMap<>(); for (int i = 0; i < randomKeys; i++) { - //keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString()); - keyValueMap.put("test_key", "test_value"); + keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString()); } for (Map.Entry entry : keyValueMap.entrySet()) { ehcacheTest.put(getICacheKey(entry.getKey()), entry.getValue()); - System.out.println("Current # entries = " + ehcacheTest.stats().getTotalEntries()); } for (Map.Entry entry : keyValueMap.entrySet()) { - ICacheKey key = getICacheKey(entry.getKey()); - /*for (ICacheKey cacheKey : ehcacheTest.keys()) { - System.out.println("Key from cache:"); - System.out.println("Dimensions = " + cacheKey.dimensions); - System.out.println("K = " + cacheKey.key); - System.out.println("Key from map:"); - System.out.println("Dimensions = " + key.dimensions); - System.out.println("K = " + key.key); - System.out.println("Equality = " + cacheKey.equals(key)); - // Confirmed via these printouts that the keys are equal. So why is there a miss? 
- }*/ - String value = ehcacheTest.get(key); + String value = ehcacheTest.get(getICacheKey(entry.getKey())); assertEquals(entry.getValue(), value); } //assertEquals(randomKeys, mockEventListener.onCachedCount.get()); @@ -563,10 +550,6 @@ public void onRemoval(RemovalNotification, V> notification) { private static class StringSerializer implements Serializer { private final Charset charset = StandardCharsets.UTF_8; - - public StringSerializer() { - int i = 0; // remove, for debug breakpoint - } @Override public byte[] serialize(String object) { return object.getBytes(charset); diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java index e66676599957a..93956fbeb42f4 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java @@ -13,6 +13,7 @@ import org.opensearch.core.common.io.stream.Writeable; import java.io.IOException; +import java.util.Objects; public class CacheStatsDimension implements Writeable { public final String dimensionName; @@ -50,4 +51,9 @@ public boolean equals(Object o) { } return other.dimensionName.equals(dimensionName) && other.dimensionValue.equals(dimensionValue); } + + @Override + public int hashCode() { + return Objects.hash(dimensionName, dimensionValue); + } } diff --git a/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java b/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java index 4c0e86b1ec901..72c51259b464d 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java @@ -8,9 +8,9 @@ package org.opensearch.common.cache.stats; -import org.opensearch.common.cache.tier.ICacheKeySerializer; import java.util.List; +import java.util.Objects; public class ICacheKey { public 
final K key; // K must implement equals() @@ -35,4 +35,9 @@ public boolean equals(Object o) { ICacheKey other = (ICacheKey) o; return key.equals(other.key) && dimensions.equals(other.dimensions); } + + @Override + public int hashCode() { + return Objects.hash(key, dimensions); + } } diff --git a/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java b/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java index 2dbf7b97716ab..c1f062276dbfa 100644 --- a/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java +++ b/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java @@ -56,6 +56,14 @@ public void testDimNumbers() throws Exception { assertEquals(key, deserialized); } } + + public void testHashCodes() throws Exception { + ICacheKey key1 = new ICacheKey<>("key", List.of(new CacheStatsDimension("dimension_name", "dimension_value"))); + ICacheKey key2 = new ICacheKey<>("key", List.of(new CacheStatsDimension("dimension_name", "dimension_value"))); + + assertEquals(key1, key2); + assertEquals(key1.hashCode(), key2.hashCode()); + } public void testNullInputs() throws Exception { BytesReferenceSerializer keySer = new BytesReferenceSerializer(); ICacheKeySerializer serializer = new ICacheKeySerializer<>(keySer); From 86184588fe8edba846455af887b13d056842dfdd Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Mon, 12 Feb 2024 12:23:19 -0800 Subject: [PATCH 08/32] Adds memory tracking to disk tier Signed-off-by: Peter Alfonsi --- .../cache/store/disk/EhcacheDiskCache.java | 63 +++++++++++++------ .../store/disk/EhCacheDiskCacheTests.java | 39 ++++++++++-- 2 files changed, 80 insertions(+), 22 deletions(-) diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 742f3a3e82607..c52177354a738 100644 --- 
a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -45,6 +45,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.function.BiFunction; +import java.util.function.Function; import java.util.function.Supplier; import java.util.function.ToLongBiFunction; @@ -109,8 +110,6 @@ public class EhcacheDiskCache implements ICache { private final Serializer keySerializer; private final Serializer valueSerializer; - - /** * Used in computeIfAbsent to synchronize loading of a given key. This is needed as ehcache doesn't provide a * computeIfAbsent method. @@ -144,7 +143,11 @@ private EhcacheDiskCache(Builder builder) { this.keySerializer = Objects.requireNonNull(builder.keySerializer, "Key serializer shouldn't be null"); this.valueSerializer = Objects.requireNonNull(builder.valueSerializer, "Value serializer shouldn't be null"); this.cacheManager = buildCacheManager(); - this.ehCacheEventListener = new EhCacheEventListener(Objects.requireNonNull(builder.getRemovalListener(), "Removal listener can't be null")); + this.ehCacheEventListener = new EhCacheEventListener( + Objects.requireNonNull(builder.getRemovalListener(), "Removal listener can't be null"), + Objects.requireNonNull(builder.keySizeFunction, "Key sizing function shouldn't be null"), + Objects.requireNonNull(builder.valueSizeFunction, "Value sizing function shouldn't be null"), + this.valueSerializer); this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); this.shardIdDimensionName = Objects.requireNonNull(builder.shardIdDimensionName, "Dimension name can't be null"); this.stats = new SingleDimensionCacheStats(shardIdDimensionName); @@ -446,39 +449,59 @@ public ICacheKey next() { * @param Type of key * @param Type of value */ - class EhCacheEventListener implements CacheEventListener, V> { + 
class EhCacheEventListener implements CacheEventListener, byte[]> { //private final StoreAwareCacheEventListener eventListener; private final RemovalListener, V> removalListener; + private Function, Long> keySizeFunction; + private Function valueSizeFunction; + private Serializer valueSerializer; - EhCacheEventListener(RemovalListener, V> removalListener) { + EhCacheEventListener(RemovalListener, V> removalListener, + Function, Long> keySizeFunction, + Function valueSizeFunction, + Serializer valueSerializer) { this.removalListener = removalListener; + this.keySizeFunction = keySizeFunction; + this.valueSizeFunction = valueSizeFunction; + this.valueSerializer = valueSerializer; + } + + private long getKeyAndOldValueSize(CacheEvent, ? extends byte[]> event) { + return keySizeFunction.apply(event.getKey()) + valueSizeFunction.apply(valueSerializer.deserialize(event.getOldValue())); } @Override - public void onEvent(CacheEvent, ? extends V> event) { + public void onEvent(CacheEvent, ? extends byte[]> event) { switch (event.getType()) { case CREATED: stats.incrementEntriesByDimensions(event.getKey().dimensions); - // TODO: Add memory values in all of these cases! 
+ long totalSize = keySizeFunction.apply(event.getKey()) + valueSizeFunction.apply(valueSerializer.deserialize(event.getNewValue())); + stats.incrementMemorySizeByDimensions(event.getKey().dimensions, totalSize); assert event.getOldValue() == null; break; case EVICTED: - this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), event.getOldValue(), RemovalReason.EVICTED)); + this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), valueSerializer.deserialize(event.getOldValue()), RemovalReason.EVICTED)); stats.decrementEntriesByDimensions(event.getKey().dimensions); + stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getKeyAndOldValueSize(event)); assert event.getNewValue() == null; break; case REMOVED: - this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), event.getOldValue(), RemovalReason.EXPLICIT)); + this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), valueSerializer.deserialize(event.getOldValue()), RemovalReason.EXPLICIT)); stats.decrementEntriesByDimensions(event.getKey().dimensions); + stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getKeyAndOldValueSize(event)); assert event.getNewValue() == null; break; case EXPIRED: - this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), event.getOldValue(), RemovalReason.INVALIDATED)); + this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), valueSerializer.deserialize(event.getOldValue()), RemovalReason.INVALIDATED)); stats.decrementEntriesByDimensions(event.getKey().dimensions); + stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getKeyAndOldValueSize(event)); assert event.getNewValue() == null; break; case UPDATED: + long newKeySize = valueSizeFunction.apply(valueSerializer.deserialize(event.getNewValue())); + long oldKeySize = valueSizeFunction.apply(valueSerializer.deserialize(event.getOldValue())); + 
stats.incrementMemorySizeByDimensions(event.getKey().dimensions, newKeySize - oldKeySize); break; default: break; @@ -492,8 +515,6 @@ public KeySerializerWrapper(Serializer internalKeySerializer) { this.serializer = new ICacheKeySerializer<>(internalKeySerializer); } - - // This constructor must be present, but does not have to work as we are not actually persisting the disk // cache after a restart. // See https://www.ehcache.org/documentation/3.0/serializers-copiers.html#persistent-vs-transient-caches @@ -564,24 +585,20 @@ public String getCacheName() { * @param Type of value */ public static class Builder extends ICacheBuilder { - // TODO: Should inherit from whatever new thing Sagar adds (ICacheBuilder?) - private CacheType cacheType; private String storagePath; - private String threadPoolAlias; - private String diskCacheAlias; // Provides capability to make ehCache event listener to run in sync mode. Used for testing too. private boolean isEventListenerModeSync; - private Class keyType; - private Class valueType; private String shardIdDimensionName; private Serializer keySerializer; private Serializer valueSerializer; + private Function, Long> keySizeFunction; + private Function valueSizeFunction; /** * Default constructor. Added to fix javadocs. 
@@ -673,6 +690,16 @@ public Builder setValueSerializer(Serializer valueSerializer) { return this; } + public Builder setKeySizeFunction(Function, Long> fn) { + this.keySizeFunction = fn; + return this; + } + + public Builder setValueSizeFunction(Function fn) { + this.valueSizeFunction = fn; + return this; + } + //@Override public EhcacheDiskCache build() { return new EhcacheDiskCache<>(this); diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 690f90f49afe7..70e5b938b7a1f 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -37,6 +37,7 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.Phaser; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; import static org.hamcrest.CoreMatchers.instanceOf; @@ -48,6 +49,8 @@ public class EhCacheDiskCacheTests extends OpenSearchSingleNodeTestCase { public void testBasicGetAndPut() throws IOException { Settings settings = Settings.builder().build(); MockRemovalListener mockRemovalListener = new MockRemovalListener<>(); + Function, Long> keySizeFunction = getKeyWeigherFn(); + Function valueSizeFunction = getValueWeigherFn(); try (NodeEnvironment env = newNodeEnvironment(settings)) { ICache ehcacheTest = new EhcacheDiskCache.Builder().setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") @@ -56,6 +59,8 @@ public void testBasicGetAndPut() throws IOException { .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) .setShardIdDimensionName(dimensionName) + .setKeySizeFunction(keySizeFunction) + .setValueSizeFunction(valueSizeFunction) 
.setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -63,19 +68,27 @@ public void testBasicGetAndPut() throws IOException { .setRemovalListener(mockRemovalListener) .build(); int randomKeys = randomIntBetween(10, 100); + long expectedSize = 0; Map keyValueMap = new HashMap<>(); for (int i = 0; i < randomKeys; i++) { keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString()); } for (Map.Entry entry : keyValueMap.entrySet()) { - ehcacheTest.put(getICacheKey(entry.getKey()), entry.getValue()); + ICacheKey iCacheKey = getICacheKey(entry.getKey()); + ehcacheTest.put(iCacheKey, entry.getValue()); + expectedSize += keySizeFunction.apply(iCacheKey); + expectedSize += valueSizeFunction.apply(entry.getValue()); } for (Map.Entry entry : keyValueMap.entrySet()) { String value = ehcacheTest.get(getICacheKey(entry.getKey())); assertEquals(entry.getValue(), value); } - //assertEquals(randomKeys, mockEventListener.onCachedCount.get()); - //assertEquals(randomKeys, mockEventListener.onHitCount.get()); + assertEquals(randomKeys, ehcacheTest.stats().getTotalEntries()); + assertEquals(randomKeys, ehcacheTest.stats().getEntriesByDimension(getMockDimensions().get(0))); + assertEquals(randomKeys, ehcacheTest.stats().getTotalHits()); + assertEquals(randomKeys, ehcacheTest.stats().getHitsByDimension(getMockDimensions().get(0))); + assertEquals(expectedSize, ehcacheTest.stats().getTotalMemorySize()); + assertEquals(expectedSize, ehcacheTest.stats().getMemorySizeByDimension(getMockDimensions().get(0))); // Validate misses int expectedNumberOfMisses = randomIntBetween(10, 200); @@ -83,7 +96,8 @@ public void testBasicGetAndPut() throws IOException { ehcacheTest.get(getICacheKey(UUID.randomUUID().toString())); } - //assertEquals(expectedNumberOfMisses, mockEventListener.onMissCount.get()); + assertEquals(expectedNumberOfMisses, ehcacheTest.stats().getTotalMisses()); + assertEquals(expectedNumberOfMisses, 
ehcacheTest.stats().getMissesByDimension(getMockDimensions().get(0))); ehcacheTest.close(); } } @@ -506,6 +520,23 @@ private ICacheKey getICacheKey(String key) { return new ICacheKey<>(key, getMockDimensions()); } + private Function, Long> getKeyWeigherFn() { + // TODO: Should this function come from the serializer impl? + return (iCacheKey) -> { + long totalSize = iCacheKey.key.length(); + for (CacheStatsDimension dim : iCacheKey.dimensions) { + totalSize += dim.dimensionName.length(); + totalSize += dim.dimensionValue.length(); + } + totalSize += 10; // The ICacheKeySerializer writes 2 VInts to record array lengths, which can be 1-5 bytes each + return totalSize; + }; + } + + private Function getValueWeigherFn() { + return (value) -> (long) value.length(); + } + class MockEventListener implements StoreAwareCacheEventListener { AtomicInteger onMissCount = new AtomicInteger(); From 46b3e49edcfcd2116b27ddbb1383be56783b5414 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Mon, 12 Feb 2024 12:31:44 -0800 Subject: [PATCH 09/32] Updated other tests Signed-off-by: Peter Alfonsi --- .../store/disk/EhCacheDiskCacheTests.java | 82 +++++++++---------- 1 file changed, 41 insertions(+), 41 deletions(-) diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 70e5b938b7a1f..65fe0b105454d 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -164,6 +164,11 @@ public void testConcurrentPut() throws Exception { .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) .setValueType(String.class) + .setKeySerializer(new StringSerializer()) + .setValueSerializer(new StringSerializer()) + .setShardIdDimensionName(dimensionName) + 
.setKeySizeFunction(getKeyWeigherFn()) + .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -194,7 +199,7 @@ public void testConcurrentPut() throws Exception { String value = ehcacheTest.get(getICacheKey(entry.getKey())); assertEquals(entry.getValue(), value); } - //assertEquals(randomKeys, mockEventListener.onCachedCount.get()); + assertEquals(randomKeys, ehcacheTest.stats().getTotalEntries()); ehcacheTest.close(); } } @@ -209,6 +214,11 @@ public void testEhcacheParallelGets() throws Exception { .setIsEventListenerModeSync(true) // For accurate count .setKeyType(String.class) .setValueType(String.class) + .setKeySerializer(new StringSerializer()) + .setValueSerializer(new StringSerializer()) + .setShardIdDimensionName(dimensionName) + .setKeySizeFunction(getKeyWeigherFn()) + .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -239,7 +249,7 @@ public void testEhcacheParallelGets() throws Exception { } phaser.arriveAndAwaitAdvance(); // Will trigger parallel puts above. 
countDownLatch.await(); // Wait for all threads to finish - //assertEquals(randomKeys, mockEventListener.onHitCount.get()); + assertEquals(randomKeys, ehcacheTest.stats().getTotalHits()); ehcacheTest.close(); } } @@ -252,6 +262,11 @@ public void testEhcacheKeyIterator() throws Exception { .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) .setValueType(String.class) + .setKeySerializer(new StringSerializer()) + .setValueSerializer(new StringSerializer()) + .setShardIdDimensionName(dimensionName) + .setKeySizeFunction(getKeyWeigherFn()) + .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -274,7 +289,6 @@ public void testEhcacheKeyIterator() throws Exception { keysCount++; assertNotNull(ehcacheTest.get(key)); } - //assertEquals(CacheStoreType.DISK, ehcacheTest.getTierType()); assertEquals(keysCount, randomKeys); ehcacheTest.close(); } @@ -290,6 +304,11 @@ public void testEvictions() throws Exception { .setThreadPoolAlias("ehcacheTest") .setKeyType(String.class) .setValueType(String.class) + .setKeySerializer(new StringSerializer()) + .setValueSerializer(new StringSerializer()) + .setShardIdDimensionName(dimensionName) + .setKeySizeFunction(getKeyWeigherFn()) + .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -305,7 +324,7 @@ public void testEvictions() throws Exception { String key = "Key" + i; ehcacheTest.put(getICacheKey(key), value); } - //assertTrue(mockEventListener.onRemovalCount.get() > 0); + assertTrue(mockRemovalListener.onRemovalCount.get() > 0); ehcacheTest.close(); } } @@ -320,6 +339,11 @@ public void testComputeIfAbsentConcurrently() throws Exception { .setThreadPoolAlias("ehcacheTest") .setKeyType(String.class) .setValueType(String.class) + .setKeySerializer(new StringSerializer()) + 
.setValueSerializer(new StringSerializer()) + .setShardIdDimensionName(dimensionName) + .setKeySizeFunction(getKeyWeigherFn()) + .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -374,9 +398,9 @@ public String load(ICacheKey key) { } assertEquals(1, numberOfTimesValueLoaded); assertEquals(0, ((EhcacheDiskCache) ehcacheTest).getCompletableFutureMap().size()); - //assertEquals(1, mockEventListener.onMissCount.get()); - //assertEquals(1, mockEventListener.onCachedCount.get()); - //assertEquals(numberOfRequest - 1, mockEventListener.onHitCount.get()); + assertEquals(1, ehcacheTest.stats().getTotalMisses()); + assertEquals(1, ehcacheTest.stats().getTotalEntries()); + assertEquals(numberOfRequest - 1, ehcacheTest.stats().getTotalHits()); ehcacheTest.close(); } } @@ -391,6 +415,11 @@ public void testComputeIfAbsentConcurrentlyAndThrowsException() throws Exception .setThreadPoolAlias("ehcacheTest") .setKeyType(String.class) .setValueType(String.class) + .setKeySerializer(new StringSerializer()) + .setValueSerializer(new StringSerializer()) + .setShardIdDimensionName(dimensionName) + .setKeySizeFunction(getKeyWeigherFn()) + .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -448,6 +477,11 @@ public void testComputeIfAbsentWithNullValueLoading() throws Exception { .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) .setValueType(String.class) + .setKeySerializer(new StringSerializer()) + .setValueSerializer(new StringSerializer()) + .setShardIdDimensionName(dimensionName) + .setKeySizeFunction(getKeyWeigherFn()) + .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -537,40 +571,6 @@ private Function 
getValueWeigherFn() { return (value) -> (long) value.length(); } - class MockEventListener implements StoreAwareCacheEventListener { - - AtomicInteger onMissCount = new AtomicInteger(); - AtomicInteger onHitCount = new AtomicInteger(); - AtomicInteger onCachedCount = new AtomicInteger(); - AtomicInteger onRemovalCount = new AtomicInteger(); - - MockEventListener() {} - - @Override - public void onMiss(K key, CacheStoreType cacheStoreType) { - assert cacheStoreType.equals(CacheStoreType.DISK); - onMissCount.incrementAndGet(); - } - - @Override - public void onRemoval(StoreAwareCacheRemovalNotification notification) { - assert notification.getCacheStoreType().equals(CacheStoreType.DISK); - onRemovalCount.incrementAndGet(); - } - - @Override - public void onHit(K key, V value, CacheStoreType cacheStoreType) { - assert cacheStoreType.equals(CacheStoreType.DISK); - onHitCount.incrementAndGet(); - } - - @Override - public void onCached(K key, V value, CacheStoreType cacheStoreType) { - assert cacheStoreType.equals(CacheStoreType.DISK); - onCachedCount.incrementAndGet(); - } - } - class MockRemovalListener implements RemovalListener, V> { AtomicInteger onRemovalCount = new AtomicInteger(); @Override From 51ea58352b5c40126f43df7969ffaabc47a0e16a Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Mon, 12 Feb 2024 13:37:07 -0800 Subject: [PATCH 10/32] Added partial memory tests Signed-off-by: Peter Alfonsi --- .../store/disk/EhCacheDiskCacheTests.java | 94 ++++++++++++++++++- 1 file changed, 91 insertions(+), 3 deletions(-) diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 65fe0b105454d..0d9943917ed05 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -27,7 
+27,9 @@ import java.io.IOException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -297,6 +299,8 @@ public void testEhcacheKeyIterator() throws Exception { public void testEvictions() throws Exception { Settings settings = Settings.builder().build(); MockRemovalListener mockRemovalListener = new MockRemovalListener<>(); + Function, Long> keySizeFunction = getKeyWeigherFn(); + Function valueSizeFunction = getValueWeigherFn(); try (NodeEnvironment env = newNodeEnvironment(settings)) { ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") @@ -307,8 +311,8 @@ public void testEvictions() throws Exception { .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) .setShardIdDimensionName(dimensionName) - .setKeySizeFunction(getKeyWeigherFn()) - .setValueSizeFunction(getValueWeigherFn()) + .setKeySizeFunction(keySizeFunction) + .setValueSizeFunction(valueSizeFunction) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -320,10 +324,20 @@ public void testEvictions() throws Exception { String value = generateRandomString(100); // Trying to generate more than 100kb to cause evictions. 
+ long sizeOfAttemptedAdds = 0; + long sizeOfAttemptedAddsValue = 0; for (int i = 0; i < 1000; i++) { String key = "Key" + i; - ehcacheTest.put(getICacheKey(key), value); + ICacheKey iCacheKey = getICacheKey((key)); + sizeOfAttemptedAdds += keySizeFunction.apply(iCacheKey) + valueSizeFunction.apply(value); + sizeOfAttemptedAddsValue += valueSizeFunction.apply(value); + ehcacheTest.put(iCacheKey, value); + } + /*System.out.println("Total size of attempted adds = " + sizeOfAttemptedAdds); + System.out.println("Total size of attempted adds (value only) = " + sizeOfAttemptedAddsValue); + System.out.println("Total memory size = " + ehcacheTest.stats().getTotalMemorySize());*/ + // TODO: Figure out why ehcache is evicting at ~30-40% of its max size rather than 100% (see commented out prints above) assertTrue(mockRemovalListener.onRemovalCount.get() > 0); ehcacheTest.close(); } @@ -534,6 +548,80 @@ public String load(ICacheKey key) throws Exception { } } + public void testMemoryTracking() throws Exception { + // Test all cases for EhCacheEventListener.onEvent and check stats memory usage is updated correctly + Settings settings = Settings.builder().build(); + Function, Long> keySizeFunction = getKeyWeigherFn(); + Function valueSizeFunction = getValueWeigherFn(); + int initialKeyLength = 40; + int initialValueLength = 40; + long sizeForOneInitialEntry = keySizeFunction.apply(new ICacheKey<>(generateRandomString(initialKeyLength), getMockDimensions())) + valueSizeFunction.apply(generateRandomString(initialValueLength)); + int maxEntries = 2000; + try (NodeEnvironment env = newNodeEnvironment(settings)) { + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + .setThreadPoolAlias("ehcacheTest") + .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") + .setKeyType(String.class) + .setValueType(String.class) + .setKeySerializer(new StringSerializer()) + .setValueSerializer(new StringSerializer()) + 
.setShardIdDimensionName(dimensionName) + .setKeySizeFunction(keySizeFunction) + .setValueSizeFunction(valueSizeFunction) + .setIsEventListenerModeSync(true) // Test fails if async; probably not all updates happen before checking stats + .setCacheType(CacheType.INDICES_REQUEST_CACHE) + .setSettings(settings) + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(maxEntries * sizeForOneInitialEntry) + .setRemovalListener(new MockRemovalListener<>()) + .build(); + long expectedSize = 0; + + // Test CREATED case + int numInitialKeys = randomIntBetween(10, 100); + ArrayList> initialKeys = new ArrayList<>(); + for (int i = 0; i < numInitialKeys; i++) { + ICacheKey key = new ICacheKey<>(generateRandomString(initialKeyLength), getMockDimensions()); + String value = generateRandomString(initialValueLength); + ehcacheTest.put(key, value); + initialKeys.add(key); + expectedSize += keySizeFunction.apply(key) + valueSizeFunction.apply(value); + assertEquals(expectedSize, ehcacheTest.stats().getTotalMemorySize()); + } + + // Test UPDATED case + HashMap, String> updatedValues = new HashMap<>(); + for (int i = 0; i < numInitialKeys * 0.5; i++) { + int newLengthDifference = randomIntBetween(-20, 20); + String newValue = generateRandomString(initialValueLength + newLengthDifference); + ehcacheTest.put(initialKeys.get(i), newValue); + updatedValues.put(initialKeys.get(i), newValue); + expectedSize += newLengthDifference; + assertEquals(expectedSize, ehcacheTest.stats().getTotalMemorySize()); + } + + // Test REMOVED case by removing all updated keys + for (int i = 0; i < numInitialKeys * 0.5; i++) { + ICacheKey removedKey = initialKeys.get(i); + ehcacheTest.invalidate(removedKey); + expectedSize -= keySizeFunction.apply(removedKey) + valueSizeFunction.apply(updatedValues.get(removedKey)); + assertEquals(expectedSize, ehcacheTest.stats().getTotalMemorySize()); + } + + // Test EVICTED case by adding entries past the cap and ensuring memory size stays as what we expect 
+ for (int i = 0; i < maxEntries - ehcacheTest.count(); i++) { + ICacheKey key = new ICacheKey<>(generateRandomString(initialKeyLength), getMockDimensions()); + String value = generateRandomString(initialValueLength); + ehcacheTest.put(key, value); + } + // TODO: Ehcache incorrectly evicts at 30-40% of max size. Fix this test once we figure out why. + // Since the EVICTED and EXPIRED cases use the same code as REMOVED, we should be ok on testing them for now. + //assertEquals(maxEntries * sizeForOneInitialEntry, ehcacheTest.stats().getTotalMemorySize()); + + ehcacheTest.close(); + } + } + private static String generateRandomString(int length) { String characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; StringBuilder randomString = new StringBuilder(length); From 17cf26a719d2a26d099688c1597c6ed405de85fb Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Mon, 12 Feb 2024 14:32:27 -0800 Subject: [PATCH 11/32] Changed cache stats API to get values by dimension list Signed-off-by: Peter Alfonsi --- .../store/disk/EhCacheDiskCacheTests.java | 10 +++++--- .../common/cache/stats/CacheStats.java | 10 ++++---- .../stats/SingleDimensionCacheStats.java | 25 ++++++++++--------- .../cache/tier/TieredSpilloverCacheStats.java | 10 ++++---- .../stats/SingleDimensionCacheStatsTests.java | 14 +++++------ 5 files changed, 36 insertions(+), 33 deletions(-) diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 0d9943917ed05..4539bfdc1a0fc 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -86,11 +86,11 @@ public void testBasicGetAndPut() throws IOException { assertEquals(entry.getValue(), value); } assertEquals(randomKeys, 
ehcacheTest.stats().getTotalEntries()); - assertEquals(randomKeys, ehcacheTest.stats().getEntriesByDimension(getMockDimensions().get(0))); + assertEquals(randomKeys, ehcacheTest.stats().getEntriesByDimensions(List.of(getMockDimensions().get(0)))); assertEquals(randomKeys, ehcacheTest.stats().getTotalHits()); - assertEquals(randomKeys, ehcacheTest.stats().getHitsByDimension(getMockDimensions().get(0))); + assertEquals(randomKeys, ehcacheTest.stats().getHitsByDimensions(List.of(getMockDimensions().get(0)))); assertEquals(expectedSize, ehcacheTest.stats().getTotalMemorySize()); - assertEquals(expectedSize, ehcacheTest.stats().getMemorySizeByDimension(getMockDimensions().get(0))); + assertEquals(expectedSize, ehcacheTest.stats().getMemorySizeByDimensions(List.of(getMockDimensions().get(0)))); // Validate misses int expectedNumberOfMisses = randomIntBetween(10, 200); @@ -99,7 +99,7 @@ public void testBasicGetAndPut() throws IOException { } assertEquals(expectedNumberOfMisses, ehcacheTest.stats().getTotalMisses()); - assertEquals(expectedNumberOfMisses, ehcacheTest.stats().getMissesByDimension(getMockDimensions().get(0))); + assertEquals(expectedNumberOfMisses, ehcacheTest.stats().getMissesByDimensions(List.of(getMockDimensions().get(0)))); ehcacheTest.close(); } } @@ -549,6 +549,8 @@ public String load(ICacheKey key) throws Exception { } public void testMemoryTracking() throws Exception { + // This test leaks threads because of an issue in Ehcache: + // https://github.com/ehcache/ehcache3/issues/3204 // Test all cases for EhCacheEventListener.onEvent and check stats memory usage is updated correctly Settings settings = Settings.builder().build(); Function, Long> keySizeFunction = getKeyWeigherFn(); diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java index a2c0d2eb3483b..4f1bf30bc861b 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java 
+++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java @@ -23,11 +23,11 @@ public interface CacheStats extends Writeable { // TODO: Make this extend ToXCon long getTotalEvictions(); long getTotalMemorySize(); long getTotalEntries(); - long getHitsByDimension(CacheStatsDimension dimension); - long getMissesByDimension(CacheStatsDimension dimension); - long getEvictionsByDimension(CacheStatsDimension dimension); - long getMemorySizeByDimension(CacheStatsDimension dimension); - long getEntriesByDimension(CacheStatsDimension dimension); + long getHitsByDimensions(List dimensions); + long getMissesByDimensions(List dimensions); + long getEvictionsByDimensions(List dimensions); + long getMemorySizeByDimensions(List dimensions); + long getEntriesByDimensions(List dimensions); void incrementHitsByDimensions(List dimensions); void incrementMissesByDimensions(List dimensions); diff --git a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java index 9f0bc0ba68c55..0fb0c00f1a3cd 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java @@ -102,8 +102,9 @@ public long getTotalEntries() { return this.totalEntries.count(); } - private long internalGetByDimension(CacheStatsDimension dimension, Map metricsMap) { - CounterMetric counter = metricsMap.get(dimension.dimensionValue); + private long internalGetByDimension(List dimensions, Map metricsMap) { + assert dimensions.size() == 1; + CounterMetric counter = metricsMap.get(dimensions.get(0).dimensionValue); if (counter == null) { return 0; } @@ -111,28 +112,28 @@ private long internalGetByDimension(CacheStatsDimension dimension, Map dimensions) { + return internalGetByDimension(dimensions, hitsMap); } @Override - public long 
getMissesByDimension(CacheStatsDimension dimension) { - return internalGetByDimension(dimension, missesMap); + public long getMissesByDimensions(List dimensions) { + return internalGetByDimension(dimensions, missesMap); } @Override - public long getEvictionsByDimension(CacheStatsDimension dimension) { - return internalGetByDimension(dimension, evictionsMap); + public long getEvictionsByDimensions(List dimensions) { + return internalGetByDimension(dimensions, evictionsMap); } @Override - public long getMemorySizeByDimension(CacheStatsDimension dimension) { - return internalGetByDimension(dimension, memorySizeMap); + public long getMemorySizeByDimensions(List dimensions) { + return internalGetByDimension(dimensions, memorySizeMap); } @Override - public long getEntriesByDimension(CacheStatsDimension dimension) { - return internalGetByDimension(dimension, entriesMap); + public long getEntriesByDimensions(List dimensions) { + return internalGetByDimension(dimensions, entriesMap); } private boolean checkDimensionList(List dimensions) { diff --git a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java index 6d1a5c982ea30..c86d53c8438f5 100644 --- a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java @@ -56,27 +56,27 @@ public long getTotalEntries() { } @Override - public long getHitsByDimension(CacheStatsDimension dimension) { + public long getHitsByDimensions(List dimensions) { return 0; } @Override - public long getMissesByDimension(CacheStatsDimension dimension) { + public long getMissesByDimensions(List dimensions) { return 0; } @Override - public long getEvictionsByDimension(CacheStatsDimension dimension) { + public long getEvictionsByDimensions(List dimensions) { return 0; } @Override - public long 
getMemorySizeByDimension(CacheStatsDimension dimension) { + public long getMemorySizeByDimensions(List dimensions) { return 0; } @Override - public long getEntriesByDimension(CacheStatsDimension dimension) { + public long getEntriesByDimensions(List dimensions) { return 0; } diff --git a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java index f7fa1859b5469..3b8feceac5a8b 100644 --- a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java +++ b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java @@ -30,8 +30,8 @@ public void testAddAndGet() throws Exception { checkTotalResults(statsAndExpectedResults); // Check values returned for a nonexistent dimension value or name return 0 - assertEquals(0, stats.getHitsByDimension(new CacheStatsDimension(dimensionName, "nonexistent"))); - assertEquals(0, stats.getHitsByDimension(new CacheStatsDimension("nonexistentName", "nonexistentValue"))); + assertEquals(0, stats.getHitsByDimensions(List.of(new CacheStatsDimension(dimensionName, "nonexistent")))); + assertEquals(0, stats.getHitsByDimensions(List.of(new CacheStatsDimension("nonexistentName", "nonexistentValue")))); } public void testSerialization() throws Exception { @@ -144,11 +144,11 @@ private void checkShardResults(StatsAndExpectedResults statsAndExpectedResults) String shardIdString = String.valueOf(shardId); CacheStatsDimension dimension = getDim(shardId); - assertEquals((long) expectedResults.get("hits").get(shardIdString), stats.getHitsByDimension(dimension)); - assertEquals((long) expectedResults.get("misses").get(shardIdString), stats.getMissesByDimension(dimension)); - assertEquals((long) expectedResults.get("evictions").get(shardIdString), stats.getEvictionsByDimension(dimension)); - assertEquals((long) expectedResults.get("memory_size").get(shardIdString), 
stats.getMemorySizeByDimension(dimension)); - assertEquals((long) expectedResults.get("entries").get(shardIdString), stats.getEntriesByDimension(dimension)); + assertEquals((long) expectedResults.get("hits").get(shardIdString), stats.getHitsByDimensions(List.of(dimension))); + assertEquals((long) expectedResults.get("misses").get(shardIdString), stats.getMissesByDimensions(List.of(dimension))); + assertEquals((long) expectedResults.get("evictions").get(shardIdString), stats.getEvictionsByDimensions(List.of(dimension))); + assertEquals((long) expectedResults.get("memory_size").get(shardIdString), stats.getMemorySizeByDimensions(List.of(dimension))); + assertEquals((long) expectedResults.get("entries").get(shardIdString), stats.getEntriesByDimensions(List.of(dimension))); } } From be5eece0160e3ae753f40dfbbe4c7dd379ecc108 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Tue, 13 Feb 2024 14:10:01 -0800 Subject: [PATCH 12/32] Cleanup Signed-off-by: Peter Alfonsi --- .../cache/store/disk/EhcacheDiskCache.java | 3 +- .../store/disk/EhCacheDiskCacheTests.java | 6 +- .../org/opensearch/common/cache/ICache.java | 4 -- .../common/cache/{stats => }/ICacheKey.java | 4 +- .../cache/store/OpenSearchOnHeapCache.java | 28 ++++---- .../cache/store/builders/ICacheBuilder.java | 4 +- .../cache/store/config/ICacheConfig.java | 3 +- .../cache/tier/BytesReferenceSerializer.java | 42 ------------ .../cache/tier/ICacheKeySerializer.java | 3 +- .../cache/tier/TieredSpilloverCache.java | 8 +-- .../indices/IRCKeyWriteableSerializer.java | 63 ----------------- .../stats/SingleDimensionCacheStatsTests.java | 2 +- .../tier/BytesReferenceSerializerTests.java | 67 ------------------- .../cache/tier/ICacheKeySerializerTests.java | 4 +- .../cache/tier/TieredSpilloverCacheTests.java | 25 +------ .../IRCKeyWriteableSerializerTests.java | 56 ---------------- 16 files changed, 28 insertions(+), 294 deletions(-) rename server/src/main/java/org/opensearch/common/cache/{stats => }/ICacheKey.java (90%) 
delete mode 100644 server/src/main/java/org/opensearch/common/cache/tier/BytesReferenceSerializer.java delete mode 100644 server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java delete mode 100644 server/src/test/java/org/opensearch/common/cache/tier/BytesReferenceSerializerTests.java delete mode 100644 server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index c52177354a738..e61fae42a831b 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -23,7 +23,7 @@ import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; -import org.opensearch.common.cache.stats.ICacheKey; +import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.enums.CacheStoreType; @@ -47,7 +47,6 @@ import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.Supplier; -import java.util.function.ToLongBiFunction; import org.ehcache.Cache; import org.ehcache.CachePersistenceException; diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 4539bfdc1a0fc..6e55fff2ee4ee 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java 
@@ -14,10 +14,7 @@ import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.cache.stats.CacheStatsDimension; -import org.opensearch.common.cache.stats.ICacheKey; -import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; -import org.opensearch.common.cache.store.enums.CacheStoreType; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.tier.Serializer; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; @@ -29,7 +26,6 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; diff --git a/server/src/main/java/org/opensearch/common/cache/ICache.java b/server/src/main/java/org/opensearch/common/cache/ICache.java index 174004ff720f0..6302f8fa23f85 100644 --- a/server/src/main/java/org/opensearch/common/cache/ICache.java +++ b/server/src/main/java/org/opensearch/common/cache/ICache.java @@ -8,11 +8,7 @@ package org.opensearch.common.cache; -import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.stats.CacheStats; -import org.opensearch.common.cache.stats.ICacheKey; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.config.ICacheConfig; import java.io.Closeable; diff --git a/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java b/server/src/main/java/org/opensearch/common/cache/ICacheKey.java similarity index 90% rename from server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java rename to server/src/main/java/org/opensearch/common/cache/ICacheKey.java index 72c51259b464d..8acf0352f25d2 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/ICacheKey.java +++ 
b/server/src/main/java/org/opensearch/common/cache/ICacheKey.java @@ -6,9 +6,11 @@ * compatible open source license. */ -package org.opensearch.common.cache.stats; +package org.opensearch.common.cache; +import org.opensearch.common.cache.stats.CacheStatsDimension; + import java.util.List; import java.util.Objects; diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index 956829a639aa3..d1273b12813c6 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -14,13 +14,15 @@ import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; -import org.opensearch.common.cache.stats.ICacheKey; +import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; +import java.util.Objects; import java.util.function.ToLongBiFunction; /** @@ -35,6 +37,7 @@ public class OpenSearchOnHeapCache implements ICache, RemovalListene private final Cache, V> cache; private final CacheStats stats; + private final RemovalListener, V> removalListener; public OpenSearchOnHeapCache(Builder builder) { CacheBuilder, V> cacheBuilder = CacheBuilder., V>builder() @@ -45,7 +48,9 @@ public OpenSearchOnHeapCache(Builder builder) { cacheBuilder.setExpireAfterAccess(builder.getExpireAfterAcess()); } cache = cacheBuilder.build(); - this.stats = new SingleDimensionCacheStats(builder.shardIdDimensionName); + String dimensionName = 
Objects.requireNonNull(builder.shardIdDimensionName, "Shard id dimension name can't be null"); + this.stats = new SingleDimensionCacheStats(dimensionName); + this.removalListener = builder.getRemovalListener(); } @Override @@ -53,8 +58,10 @@ public V get(ICacheKey key) { V value = cache.get(key); if (value != null) { //eventListener.onHit(key, value, CacheStoreType.ON_HEAP); + stats.incrementHitsByDimensions(key.dimensions); } else { //eventListener.onMiss(key, CacheStoreType.ON_HEAP); + stats.incrementMissesByDimensions(key.dimensions); } return value; } @@ -63,6 +70,7 @@ public V get(ICacheKey key) { public void put(ICacheKey key, V value) { cache.put(key, value); //eventListener.onCached(key, value, CacheStoreType.ON_HEAP); + stats.incrementEntriesByDimensions(key.dimensions); } @Override @@ -70,9 +78,12 @@ public V computeIfAbsent(ICacheKey key, LoadAwareCacheLoader, V> V value = cache.computeIfAbsent(key, key1 -> loader.load(key)); if (!loader.isLoaded()) { //eventListener.onHit(key, value, CacheStoreType.ON_HEAP); + stats.incrementHitsByDimensions(key.dimensions); } else { //eventListener.onMiss(key, CacheStoreType.ON_HEAP); + stats.incrementMissesByDimensions(key.dimensions); //eventListener.onCached(key, value, CacheStoreType.ON_HEAP); + stats.incrementEntriesByDimensions(key.dimensions); } return value; } @@ -80,6 +91,7 @@ public V computeIfAbsent(ICacheKey key, LoadAwareCacheLoader, V> @Override public void invalidate(ICacheKey key) { cache.invalidate(key); + stats.decrementEntriesByDimensions(key.dimensions); } @Override @@ -112,19 +124,9 @@ public CacheStats stats() { @Override public void onRemoval(RemovalNotification, V> notification) { - // TODO + removalListener.onRemoval(notification); } - /** - * Stats for opensearch on heap cache. 
- */ - /*class OpenSearchOnHeapCacheStats implements CacheStats { - @Override - public long count() { - return cache.count(); - } - }*/ - /** * Builder object * @param Type of key diff --git a/server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java b/server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java index 9ec6346f6698e..c109a4504bf04 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java +++ b/server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java @@ -10,9 +10,7 @@ import org.opensearch.common.cache.ICache; import org.opensearch.common.cache.RemovalListener; -import org.opensearch.common.cache.stats.ICacheKey; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; diff --git a/server/src/main/java/org/opensearch/common/cache/store/config/ICacheConfig.java b/server/src/main/java/org/opensearch/common/cache/store/config/ICacheConfig.java index d62210b81a92a..849e1de255d8d 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/config/ICacheConfig.java +++ b/server/src/main/java/org/opensearch/common/cache/store/config/ICacheConfig.java @@ -10,8 +10,7 @@ import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.RemovalListener; -import org.opensearch.common.cache.stats.ICacheKey; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.settings.Settings; /** diff --git a/server/src/main/java/org/opensearch/common/cache/tier/BytesReferenceSerializer.java b/server/src/main/java/org/opensearch/common/cache/tier/BytesReferenceSerializer.java deleted file mode 100644 index 
3ac30b09bddca..0000000000000 --- a/server/src/main/java/org/opensearch/common/cache/tier/BytesReferenceSerializer.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.common.cache.tier; - -import org.opensearch.core.common.bytes.BytesArray; -import org.opensearch.core.common.bytes.BytesReference; - -import java.util.Arrays; - -/** - * A serializer which transforms BytesReference to byte[]. - * The type of BytesReference is NOT preserved after deserialization, but nothing in opensearch should care. - */ -public class BytesReferenceSerializer implements Serializer { - // This class does not get passed to ehcache itself, so it's not required that classes match after deserialization. - - public BytesReferenceSerializer() {} - - @Override - public byte[] serialize(BytesReference object) { - return BytesReference.toBytes(object); - } - - @Override - public BytesReference deserialize(byte[] bytes) { - if (bytes == null) { - return null; - } - return new BytesArray(bytes); - } - - @Override - public boolean equals(BytesReference object, byte[] bytes) { - return Arrays.equals(serialize(object), bytes); - } -} diff --git a/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java b/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java index 0e0b83495ea49..b55448b07feb0 100644 --- a/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java +++ b/server/src/main/java/org/opensearch/common/cache/tier/ICacheKeySerializer.java @@ -8,10 +8,9 @@ package org.opensearch.common.cache.tier; -import org.apache.lucene.util.BytesRef; import org.opensearch.OpenSearchException; import org.opensearch.common.cache.stats.CacheStatsDimension; -import org.opensearch.common.cache.stats.ICacheKey; +import 
org.opensearch.common.cache.ICacheKey; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.BytesStreamInput; diff --git a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java index d25137635edfd..77433d412befa 100644 --- a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java +++ b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java @@ -12,15 +12,9 @@ import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; -import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; -import org.opensearch.common.cache.stats.ICacheKey; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; -import org.opensearch.common.cache.store.StoreAwareCacheValue; +import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.store.builders.ICacheBuilder; -import org.opensearch.common.cache.store.enums.CacheStoreType; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; import org.opensearch.common.util.concurrent.ReleasableLock; import org.opensearch.common.util.iterable.Iterables; diff --git a/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java b/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java deleted file mode 100644 index 2a288c47981c5..0000000000000 --- a/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under 
the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.indices; - -import org.opensearch.OpenSearchException; -import org.opensearch.common.cache.tier.Serializer; -import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.core.common.io.stream.BytesStreamInput; - -import java.io.IOException; -import java.util.Arrays; - -/** - * This class serializes the IndicesRequestCache.Key using its writeTo method. - */ -public class IRCKeyWriteableSerializer implements Serializer { - - - public IRCKeyWriteableSerializer() { - } - - @Override - public byte[] serialize(IndicesRequestCache.Key object) { - try { - BytesStreamOutput os = new BytesStreamOutput(); - object.writeTo(os); - return BytesReference.toBytes(os.bytes()); - } catch (IOException e) { - throw new OpenSearchException(e); - } - } - - @Override - public IndicesRequestCache.Key deserialize(byte[] bytes) { - if (bytes == null) { - return null; - } - try { - BytesStreamInput is = new BytesStreamInput(bytes, 0, bytes.length); - return new IndicesRequestCache.Key(is); - } catch (IOException e) { - throw new OpenSearchException(e); - } - } - - @Override - public boolean equals(IndicesRequestCache.Key object, byte[] bytes) { - // Deserialization is much slower than serialization for keys of order 1 KB, - // while time to serialize is fairly constant (per byte) - if (bytes.length < 5000) { - return Arrays.equals(serialize(object), bytes); - } else { - return object.equals(deserialize(bytes)); - } - } -} diff --git a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java index 3b8feceac5a8b..2e0cb56fda3c9 100644 --- a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java +++ 
b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java @@ -115,7 +115,7 @@ private StatsAndExpectedResults getPopulatedStats() { expectedMemorySize.put(shardIdString, expectedMemorySize.get(shardIdString) + memIncrementAmount); } - int numEntryIncrements = rand.nextInt(10); + int numEntryIncrements = rand.nextInt(9) + 1; for (int i = 0; i < numEntryIncrements; i++) { stats.incrementEntriesByDimensions(dimensions); expectedEntries.put(shardIdString, expectedEntries.get(shardIdString) + 1); diff --git a/server/src/test/java/org/opensearch/common/cache/tier/BytesReferenceSerializerTests.java b/server/src/test/java/org/opensearch/common/cache/tier/BytesReferenceSerializerTests.java deleted file mode 100644 index af81f04149ae6..0000000000000 --- a/server/src/test/java/org/opensearch/common/cache/tier/BytesReferenceSerializerTests.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.common.cache.tier; - -import org.opensearch.common.Randomness; -import org.opensearch.common.bytes.ReleasableBytesReference; -import org.opensearch.common.util.BigArrays; -import org.opensearch.common.util.PageCacheRecycler; -import org.opensearch.core.common.bytes.BytesArray; -import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.core.common.bytes.CompositeBytesReference; -import org.opensearch.core.common.util.ByteArray; -import org.opensearch.test.OpenSearchTestCase; - -import java.util.Random; - -public class BytesReferenceSerializerTests extends OpenSearchTestCase { - public void testEquality() throws Exception { - BytesReferenceSerializer ser = new BytesReferenceSerializer(); - // Test that values are equal before and after serialization, for each implementation of BytesReference. 
- byte[] bytesValue = new byte[1000]; - Random rand = Randomness.get(); - rand.nextBytes(bytesValue); - - BytesReference ba = new BytesArray(bytesValue); - byte[] serialized = ser.serialize(ba); - assertTrue(ser.equals(ba, serialized)); - BytesReference deserialized = ser.deserialize(serialized); - assertEquals(ba, deserialized); - - ba = new BytesArray(new byte[] {}); - serialized = ser.serialize(ba); - assertTrue(ser.equals(ba, serialized)); - deserialized = ser.deserialize(serialized); - assertEquals(ba, deserialized); - - BytesReference cbr = CompositeBytesReference.of(new BytesArray(bytesValue), new BytesArray(bytesValue)); - serialized = ser.serialize(cbr); - assertTrue(ser.equals(cbr, serialized)); - deserialized = ser.deserialize(serialized); - assertEquals(cbr, deserialized); - - // We need the PagedBytesReference to be larger than the page size (16 KB) in order to actually create it - byte[] pbrValue = new byte[PageCacheRecycler.PAGE_SIZE_IN_BYTES * 2]; - rand.nextBytes(pbrValue); - ByteArray arr = BigArrays.NON_RECYCLING_INSTANCE.newByteArray(pbrValue.length); - arr.set(0L, pbrValue, 0, pbrValue.length); - assert !arr.hasArray(); - BytesReference pbr = BytesReference.fromByteArray(arr, pbrValue.length); - serialized = ser.serialize(pbr); - assertTrue(ser.equals(pbr, serialized)); - deserialized = ser.deserialize(serialized); - assertEquals(pbr, deserialized); - - BytesReference rbr = new ReleasableBytesReference(new BytesArray(bytesValue), ReleasableBytesReference.NO_OP); - serialized = ser.serialize(rbr); - assertTrue(ser.equals(rbr, serialized)); - deserialized = ser.deserialize(serialized); - assertEquals(rbr, deserialized); - } -} diff --git a/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java b/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java index c1f062276dbfa..ef6e9ac2ee369 100644 --- a/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java +++ 
b/server/src/test/java/org/opensearch/common/cache/tier/ICacheKeySerializerTests.java @@ -10,13 +10,11 @@ import org.opensearch.common.Randomness; import org.opensearch.common.cache.stats.CacheStatsDimension; -import org.opensearch.common.cache.stats.ICacheKey; +import org.opensearch.common.cache.ICacheKey; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.test.OpenSearchTestCase; -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Random; diff --git a/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java b/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java index 158b7be2315f8..9d53b8bcef4d4 100644 --- a/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java +++ b/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java @@ -8,33 +8,12 @@ package org.opensearch.common.cache.tier; -import org.opensearch.common.cache.LoadAwareCacheLoader; -import org.opensearch.common.cache.RemovalReason; -import org.opensearch.common.cache.stats.CacheStats; -import org.opensearch.common.cache.stats.ICacheKey; -import org.opensearch.common.cache.store.OpenSearchOnHeapCache; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; -import org.opensearch.common.cache.store.builders.ICacheBuilder; -import org.opensearch.common.cache.store.enums.CacheStoreType; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; -import org.opensearch.common.metrics.CounterMetric; import org.opensearch.test.OpenSearchTestCase; -import java.util.ArrayList; -import java.util.EnumMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import 
java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Phaser; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - public class TieredSpilloverCacheTests extends OpenSearchTestCase { + // TODO: Implementation of new stats is not yet done for TieredSpilloverCache + /*public void testComputeIfAbsentWithoutAnyOnHeapCacheEviction() throws Exception { int onHeapCacheSize = randomIntBetween(10, 30); MockCacheEventListener eventListener = new MockCacheEventListener(); diff --git a/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java b/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java deleted file mode 100644 index 61484db6a51d5..0000000000000 --- a/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ -package org.opensearch.indices; - -import org.opensearch.common.Randomness; -import org.opensearch.common.settings.ClusterSettings; -import org.opensearch.common.settings.Settings; -import org.opensearch.core.common.bytes.BytesArray; -import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.core.index.shard.ShardId; -import org.opensearch.index.IndexService; -import org.opensearch.index.shard.IndexShard; -import org.opensearch.test.OpenSearchSingleNodeTestCase; - -import java.util.Random; -import java.util.UUID; - -public class IRCKeyWriteableSerializerTests extends OpenSearchSingleNodeTestCase { - - public void testSerializer() throws Exception { - IndexService indexService = createIndex("test"); - IndexShard indexShard = indexService.getShardOrNull(0); - IRCKeyWriteableSerializer ser = new IRCKeyWriteableSerializer(); - - int NUM_KEYS = 1000; - int[] valueLengths = new int[] { 1000, 6000 }; // test both branches in equals() - Random rand = Randomness.get(); - for (int valueLength : valueLengths) { - for (int i = 0; i < NUM_KEYS; i++) { - IndicesRequestCache.Key key = getRandomIRCKey(valueLength, rand, indexShard.shardId()); - byte[] serialized = ser.serialize(key); - assertTrue(ser.equals(key, serialized)); - IndicesRequestCache.Key deserialized = ser.deserialize(serialized); - assertTrue(key.equals(deserialized)); - } - } - } - - private IndicesRequestCache.Key getRandomIRCKey( - int valueLength, - Random random, - ShardId shard - ) { - byte[] value = new byte[valueLength]; - for (int i = 0; i < valueLength; i++) { - value[i] = (byte) (random.nextInt(126 - 32) + 32); - } - BytesReference keyValue = new BytesArray(value); - return new IndicesRequestCache.Key(shard, keyValue, UUID.randomUUID().toString()); // same UUID source as used in real key - } -} From a6b0899d9f52a58a09827ba689b6a275d3ec4fc3 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Tue, 13 Feb 2024 14:29:16 -0800 Subject: [PATCH 13/32] Redid memory tracking to use 
already implemented weigher fn Signed-off-by: Peter Alfonsi --- .../cache/store/disk/EhcacheDiskCache.java | 49 ++++++--------- .../store/disk/EhCacheDiskCacheTests.java | 61 +++++++------------ 2 files changed, 41 insertions(+), 69 deletions(-) diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index e61fae42a831b..44c272058733a 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -45,8 +45,8 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.function.BiFunction; -import java.util.function.Function; import java.util.function.Supplier; +import java.util.function.ToLongBiFunction; import org.ehcache.Cache; import org.ehcache.CachePersistenceException; @@ -144,8 +144,7 @@ private EhcacheDiskCache(Builder builder) { this.cacheManager = buildCacheManager(); this.ehCacheEventListener = new EhCacheEventListener( Objects.requireNonNull(builder.getRemovalListener(), "Removal listener can't be null"), - Objects.requireNonNull(builder.keySizeFunction, "Key sizing function shouldn't be null"), - Objects.requireNonNull(builder.valueSizeFunction, "Value sizing function shouldn't be null"), + Objects.requireNonNull(builder.getWeigher(), "Weigher function can't be null"), this.valueSerializer); this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); this.shardIdDimensionName = Objects.requireNonNull(builder.shardIdDimensionName, "Dimension name can't be null"); @@ -452,22 +451,23 @@ class EhCacheEventListener implements CacheEventListener, byt //private final StoreAwareCacheEventListener eventListener; private final RemovalListener, V> removalListener; - private Function, Long> keySizeFunction; - 
private Function valueSizeFunction; + private ToLongBiFunction, V> weigher; private Serializer valueSerializer; EhCacheEventListener(RemovalListener, V> removalListener, - Function, Long> keySizeFunction, - Function valueSizeFunction, + ToLongBiFunction, V> weigher, Serializer valueSerializer) { this.removalListener = removalListener; - this.keySizeFunction = keySizeFunction; - this.valueSizeFunction = valueSizeFunction; + this.weigher = weigher; this.valueSerializer = valueSerializer; } - private long getKeyAndOldValueSize(CacheEvent, ? extends byte[]> event) { - return keySizeFunction.apply(event.getKey()) + valueSizeFunction.apply(valueSerializer.deserialize(event.getOldValue())); + private long getOldValuePairSize(CacheEvent, ? extends byte[]> event) { + return weigher.applyAsLong(event.getKey(), valueSerializer.deserialize(event.getOldValue())); + } + + private long getNewValuePairSize(CacheEvent, ? extends byte[]> event) { + return weigher.applyAsLong(event.getKey(), valueSerializer.deserialize(event.getNewValue())); } @Override @@ -475,32 +475,31 @@ public void onEvent(CacheEvent, ? 
extends byte[]> event) switch (event.getType()) { case CREATED: stats.incrementEntriesByDimensions(event.getKey().dimensions); - long totalSize = keySizeFunction.apply(event.getKey()) + valueSizeFunction.apply(valueSerializer.deserialize(event.getNewValue())); - stats.incrementMemorySizeByDimensions(event.getKey().dimensions, totalSize); + stats.incrementMemorySizeByDimensions(event.getKey().dimensions, getNewValuePairSize(event)); assert event.getOldValue() == null; break; case EVICTED: this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), valueSerializer.deserialize(event.getOldValue()), RemovalReason.EVICTED)); stats.decrementEntriesByDimensions(event.getKey().dimensions); - stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getKeyAndOldValueSize(event)); + stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getOldValuePairSize(event)); assert event.getNewValue() == null; break; case REMOVED: this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), valueSerializer.deserialize(event.getOldValue()), RemovalReason.EXPLICIT)); stats.decrementEntriesByDimensions(event.getKey().dimensions); - stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getKeyAndOldValueSize(event)); + stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getOldValuePairSize(event)); assert event.getNewValue() == null; break; case EXPIRED: this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), valueSerializer.deserialize(event.getOldValue()), RemovalReason.INVALIDATED)); stats.decrementEntriesByDimensions(event.getKey().dimensions); - stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getKeyAndOldValueSize(event)); + stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getOldValuePairSize(event)); assert event.getNewValue() == null; break; case UPDATED: - long newKeySize = valueSizeFunction.apply(valueSerializer.deserialize(event.getNewValue())); - long 
oldKeySize = valueSizeFunction.apply(valueSerializer.deserialize(event.getOldValue())); - stats.incrementMemorySizeByDimensions(event.getKey().dimensions, newKeySize - oldKeySize); + long newSize = getNewValuePairSize(event); + long oldSize = getOldValuePairSize(event); + stats.incrementMemorySizeByDimensions(event.getKey().dimensions, newSize - oldSize); break; default: break; @@ -596,8 +595,6 @@ public static class Builder extends ICacheBuilder { private String shardIdDimensionName; private Serializer keySerializer; private Serializer valueSerializer; - private Function, Long> keySizeFunction; - private Function valueSizeFunction; /** * Default constructor. Added to fix javadocs. @@ -689,16 +686,6 @@ public Builder setValueSerializer(Serializer valueSerializer) { return this; } - public Builder setKeySizeFunction(Function, Long> fn) { - this.keySizeFunction = fn; - return this; - } - - public Builder setValueSizeFunction(Function fn) { - this.valueSizeFunction = fn; - return this; - } - //@Override public EhcacheDiskCache build() { return new EhcacheDiskCache<>(this); diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 6e55fff2ee4ee..b00436b5c1764 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -36,6 +36,7 @@ import java.util.concurrent.Phaser; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; +import java.util.function.ToLongBiFunction; import static org.hamcrest.CoreMatchers.instanceOf; @@ -47,8 +48,7 @@ public class EhCacheDiskCacheTests extends OpenSearchSingleNodeTestCase { public void testBasicGetAndPut() throws IOException { Settings settings = Settings.builder().build(); MockRemovalListener 
mockRemovalListener = new MockRemovalListener<>(); - Function, Long> keySizeFunction = getKeyWeigherFn(); - Function valueSizeFunction = getValueWeigherFn(); + ToLongBiFunction, String> weigher = getWeigher(); try (NodeEnvironment env = newNodeEnvironment(settings)) { ICache ehcacheTest = new EhcacheDiskCache.Builder().setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") @@ -57,13 +57,12 @@ public void testBasicGetAndPut() throws IOException { .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) .setShardIdDimensionName(dimensionName) - .setKeySizeFunction(keySizeFunction) - .setValueSizeFunction(valueSizeFunction) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setRemovalListener(mockRemovalListener) + .setWeigher(weigher) .build(); int randomKeys = randomIntBetween(10, 100); long expectedSize = 0; @@ -74,8 +73,7 @@ public void testBasicGetAndPut() throws IOException { for (Map.Entry entry : keyValueMap.entrySet()) { ICacheKey iCacheKey = getICacheKey(entry.getKey()); ehcacheTest.put(iCacheKey, entry.getValue()); - expectedSize += keySizeFunction.apply(iCacheKey); - expectedSize += valueSizeFunction.apply(entry.getValue()); + expectedSize += weigher.applyAsLong(iCacheKey, entry.getValue()); } for (Map.Entry entry : keyValueMap.entrySet()) { String value = ehcacheTest.get(getICacheKey(entry.getKey())); @@ -165,13 +163,12 @@ public void testConcurrentPut() throws Exception { .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) .setShardIdDimensionName(dimensionName) - .setKeySizeFunction(getKeyWeigherFn()) - .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) 
.setRemovalListener(mockRemovalListener) + .setWeigher(getWeigher()) .build(); int randomKeys = randomIntBetween(20, 100); Thread[] threads = new Thread[randomKeys]; @@ -215,13 +212,12 @@ public void testEhcacheParallelGets() throws Exception { .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) .setShardIdDimensionName(dimensionName) - .setKeySizeFunction(getKeyWeigherFn()) - .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setRemovalListener(mockRemovalListener) + .setWeigher(getWeigher()) .build(); int randomKeys = randomIntBetween(20, 100); Thread[] threads = new Thread[randomKeys]; @@ -263,13 +259,12 @@ public void testEhcacheKeyIterator() throws Exception { .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) .setShardIdDimensionName(dimensionName) - .setKeySizeFunction(getKeyWeigherFn()) - .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setRemovalListener(new MockRemovalListener<>()) + .setWeigher(getWeigher()) .build(); int randomKeys = randomIntBetween(2, 100); @@ -295,8 +290,7 @@ public void testEhcacheKeyIterator() throws Exception { public void testEvictions() throws Exception { Settings settings = Settings.builder().build(); MockRemovalListener mockRemovalListener = new MockRemovalListener<>(); - Function, Long> keySizeFunction = getKeyWeigherFn(); - Function valueSizeFunction = getValueWeigherFn(); + ToLongBiFunction, String> weigher = getWeigher(); try (NodeEnvironment env = newNodeEnvironment(settings)) { ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") @@ -307,13 +301,12 @@ 
public void testEvictions() throws Exception { .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) .setShardIdDimensionName(dimensionName) - .setKeySizeFunction(keySizeFunction) - .setValueSizeFunction(valueSizeFunction) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setRemovalListener(mockRemovalListener) + .setWeigher(weigher) .build(); // Generate a string with 100 characters @@ -325,8 +318,7 @@ public void testEvictions() throws Exception { for (int i = 0; i < 1000; i++) { String key = "Key" + i; ICacheKey iCacheKey = getICacheKey((key)); - sizeOfAttemptedAdds += keySizeFunction.apply(iCacheKey) + valueSizeFunction.apply(value); - sizeOfAttemptedAddsValue += valueSizeFunction.apply(value); + sizeOfAttemptedAdds += weigher.applyAsLong(iCacheKey, value); ehcacheTest.put(iCacheKey, value); } @@ -352,13 +344,12 @@ public void testComputeIfAbsentConcurrently() throws Exception { .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) .setShardIdDimensionName(dimensionName) - .setKeySizeFunction(getKeyWeigherFn()) - .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setRemovalListener(mockRemovalListener) + .setWeigher(getWeigher()) .build(); int numberOfRequest = 2;// randomIntBetween(200, 400); @@ -428,13 +419,12 @@ public void testComputeIfAbsentConcurrentlyAndThrowsException() throws Exception .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) .setShardIdDimensionName(dimensionName) - .setKeySizeFunction(getKeyWeigherFn()) - .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) 
.setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setRemovalListener(mockRemovalListener) + .setWeigher(getWeigher()) .build(); int numberOfRequest = randomIntBetween(200, 400); @@ -490,13 +480,12 @@ public void testComputeIfAbsentWithNullValueLoading() throws Exception { .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) .setShardIdDimensionName(dimensionName) - .setKeySizeFunction(getKeyWeigherFn()) - .setValueSizeFunction(getValueWeigherFn()) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setRemovalListener(mockRemovalListener) + .setWeigher(getWeigher()) .build(); int numberOfRequest = randomIntBetween(200, 400); @@ -549,11 +538,10 @@ public void testMemoryTracking() throws Exception { // https://github.com/ehcache/ehcache3/issues/3204 // Test all cases for EhCacheEventListener.onEvent and check stats memory usage is updated correctly Settings settings = Settings.builder().build(); - Function, Long> keySizeFunction = getKeyWeigherFn(); - Function valueSizeFunction = getValueWeigherFn(); + ToLongBiFunction, String> weigher = getWeigher(); int initialKeyLength = 40; int initialValueLength = 40; - long sizeForOneInitialEntry = keySizeFunction.apply(new ICacheKey<>(generateRandomString(initialKeyLength), getMockDimensions())) + valueSizeFunction.apply(generateRandomString(initialValueLength)); + long sizeForOneInitialEntry = weigher.applyAsLong(new ICacheKey<>(generateRandomString(initialKeyLength), getMockDimensions()), generateRandomString(initialValueLength)); int maxEntries = 2000; try (NodeEnvironment env = newNodeEnvironment(settings)) { ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") @@ -564,14 +552,13 @@ public void testMemoryTracking() throws Exception { .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) .setShardIdDimensionName(dimensionName) - 
.setKeySizeFunction(keySizeFunction) - .setValueSizeFunction(valueSizeFunction) .setIsEventListenerModeSync(true) // Test fails if async; probably not all updates happen before checking stats .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(maxEntries * sizeForOneInitialEntry) .setRemovalListener(new MockRemovalListener<>()) + .setWeigher(weigher) .build(); long expectedSize = 0; @@ -583,7 +570,7 @@ public void testMemoryTracking() throws Exception { String value = generateRandomString(initialValueLength); ehcacheTest.put(key, value); initialKeys.add(key); - expectedSize += keySizeFunction.apply(key) + valueSizeFunction.apply(value); + expectedSize += weigher.applyAsLong(key, value); assertEquals(expectedSize, ehcacheTest.stats().getTotalMemorySize()); } @@ -602,7 +589,7 @@ public void testMemoryTracking() throws Exception { for (int i = 0; i < numInitialKeys * 0.5; i++) { ICacheKey removedKey = initialKeys.get(i); ehcacheTest.invalidate(removedKey); - expectedSize -= keySizeFunction.apply(removedKey) + valueSizeFunction.apply(updatedValues.get(removedKey)); + expectedSize -= weigher.applyAsLong(removedKey, updatedValues.get(removedKey)); assertEquals(expectedSize, ehcacheTest.stats().getTotalMemorySize()); } @@ -640,23 +627,21 @@ private ICacheKey getICacheKey(String key) { return new ICacheKey<>(key, getMockDimensions()); } - private Function, Long> getKeyWeigherFn() { - // TODO: Should this function come from the serializer impl? 
- return (iCacheKey) -> { + private ToLongBiFunction, String> getWeigher() { + return (iCacheKey, value) -> { + // Size consumed by key long totalSize = iCacheKey.key.length(); for (CacheStatsDimension dim : iCacheKey.dimensions) { totalSize += dim.dimensionName.length(); totalSize += dim.dimensionValue.length(); } totalSize += 10; // The ICacheKeySerializer writes 2 VInts to record array lengths, which can be 1-5 bytes each + // Size consumed by value + totalSize += value.length(); return totalSize; }; } - private Function getValueWeigherFn() { - return (value) -> (long) value.length(); - } - class MockRemovalListener implements RemovalListener, V> { AtomicInteger onRemovalCount = new AtomicInteger(); @Override From 9b1e66ce1d4dcc50b9b5a7f51bf7e773f0d984a6 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Tue, 13 Feb 2024 15:42:14 -0800 Subject: [PATCH 14/32] Split CacheStats into CacheStats and CacheStatsBase, which can't update stats Signed-off-by: Peter Alfonsi --- .../common/cache/stats/CacheStats.java | 21 +++--------- .../common/cache/stats/CacheStatsBase.java | 34 +++++++++++++++++++ 2 files changed, 39 insertions(+), 16 deletions(-) create mode 100644 server/src/main/java/org/opensearch/common/cache/stats/CacheStatsBase.java diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java index 4f1bf30bc861b..3aebb49145128 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java @@ -8,34 +8,23 @@ package org.opensearch.common.cache.stats; -import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.core.xcontent.ToXContentFragment; import java.util.List; /** - * Interface for any cache specific stats. - * TODO: Add rest of stats like hits/misses. + * Interface for any cache specific stats. 
Allows accessing stats by total value or by dimension, + * and also allows updating stats. + * When updating stats, we take in the list of dimensions associated with the key/value pair that caused the update. + * This allows us to aggregate stats by dimension when accessing them. */ -public interface CacheStats extends Writeable { // TODO: Make this extend ToXContentFragment too - long getTotalHits(); - long getTotalMisses(); - long getTotalEvictions(); - long getTotalMemorySize(); - long getTotalEntries(); - long getHitsByDimensions(List dimensions); - long getMissesByDimensions(List dimensions); - long getEvictionsByDimensions(List dimensions); - long getMemorySizeByDimensions(List dimensions); - long getEntriesByDimensions(List dimensions); - +public interface CacheStats extends CacheStatsBase { // TODO: Make this extend ToXContentFragment too void incrementHitsByDimensions(List dimensions); void incrementMissesByDimensions(List dimensions); void incrementEvictionsByDimensions(List dimensions); // Can also use to decrement, with negative values void incrementMemorySizeByDimensions(List dimensions, long amountBytes); void incrementEntriesByDimensions(List dimensions); - void decrementEntriesByDimensions(List dimensions); } diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsBase.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsBase.java new file mode 100644 index 0000000000000..779bfba937e37 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsBase.java @@ -0,0 +1,34 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.stats; + +import org.opensearch.core.common.io.stream.Writeable; + +import java.util.List; + +/** + * An interface for accessing stats for a specific cache. 
Stats can be gotten as totals, or only for specific + * dimension values. + */ +public interface CacheStatsBase extends Writeable { // TODO: Also extend ToXContentFragment + // Methods to get total values. + long getTotalHits(); + long getTotalMisses(); + long getTotalEvictions(); + long getTotalMemorySize(); + long getTotalEntries(); + + // Methods to get values for a specific set of dimensions. + // Returns the sum of values for cache entries that match all dimensions in the list. + long getHitsByDimensions(List dimensions); + long getMissesByDimensions(List dimensions); + long getEvictionsByDimensions(List dimensions); + long getMemorySizeByDimensions(List dimensions); + long getEntriesByDimensions(List dimensions); +} From 01e04a8e42c8fdf2fb61605ebfe61953fd7c694a Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Tue, 13 Feb 2024 15:44:15 -0800 Subject: [PATCH 15/32] Readded BytesReferenceSerializer impl as ICacheKeySerializerTests depended on it Signed-off-by: Peter Alfonsi --- .../cache/tier/BytesReferenceSerializer.java | 42 ++++++++++++ .../tier/BytesReferenceSerializerTests.java | 67 +++++++++++++++++++ 2 files changed, 109 insertions(+) create mode 100644 server/src/main/java/org/opensearch/common/cache/tier/BytesReferenceSerializer.java create mode 100644 server/src/test/java/org/opensearch/common/cache/tier/BytesReferenceSerializerTests.java diff --git a/server/src/main/java/org/opensearch/common/cache/tier/BytesReferenceSerializer.java b/server/src/main/java/org/opensearch/common/cache/tier/BytesReferenceSerializer.java new file mode 100644 index 0000000000000..3ac30b09bddca --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/tier/BytesReferenceSerializer.java @@ -0,0 +1,42 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.tier; + +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesReference; + +import java.util.Arrays; + +/** + * A serializer which transforms BytesReference to byte[]. + * The type of BytesReference is NOT preserved after deserialization, but nothing in opensearch should care. + */ +public class BytesReferenceSerializer implements Serializer { + // This class does not get passed to ehcache itself, so it's not required that classes match after deserialization. + + public BytesReferenceSerializer() {} + + @Override + public byte[] serialize(BytesReference object) { + return BytesReference.toBytes(object); + } + + @Override + public BytesReference deserialize(byte[] bytes) { + if (bytes == null) { + return null; + } + return new BytesArray(bytes); + } + + @Override + public boolean equals(BytesReference object, byte[] bytes) { + return Arrays.equals(serialize(object), bytes); + } +} diff --git a/server/src/test/java/org/opensearch/common/cache/tier/BytesReferenceSerializerTests.java b/server/src/test/java/org/opensearch/common/cache/tier/BytesReferenceSerializerTests.java new file mode 100644 index 0000000000000..af81f04149ae6 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/tier/BytesReferenceSerializerTests.java @@ -0,0 +1,67 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.tier; + +import org.opensearch.common.Randomness; +import org.opensearch.common.bytes.ReleasableBytesReference; +import org.opensearch.common.util.BigArrays; +import org.opensearch.common.util.PageCacheRecycler; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.bytes.CompositeBytesReference; +import org.opensearch.core.common.util.ByteArray; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.Random; + +public class BytesReferenceSerializerTests extends OpenSearchTestCase { + public void testEquality() throws Exception { + BytesReferenceSerializer ser = new BytesReferenceSerializer(); + // Test that values are equal before and after serialization, for each implementation of BytesReference. + byte[] bytesValue = new byte[1000]; + Random rand = Randomness.get(); + rand.nextBytes(bytesValue); + + BytesReference ba = new BytesArray(bytesValue); + byte[] serialized = ser.serialize(ba); + assertTrue(ser.equals(ba, serialized)); + BytesReference deserialized = ser.deserialize(serialized); + assertEquals(ba, deserialized); + + ba = new BytesArray(new byte[] {}); + serialized = ser.serialize(ba); + assertTrue(ser.equals(ba, serialized)); + deserialized = ser.deserialize(serialized); + assertEquals(ba, deserialized); + + BytesReference cbr = CompositeBytesReference.of(new BytesArray(bytesValue), new BytesArray(bytesValue)); + serialized = ser.serialize(cbr); + assertTrue(ser.equals(cbr, serialized)); + deserialized = ser.deserialize(serialized); + assertEquals(cbr, deserialized); + + // We need the PagedBytesReference to be larger than the page size (16 KB) in order to actually create it + byte[] pbrValue = new byte[PageCacheRecycler.PAGE_SIZE_IN_BYTES * 2]; + rand.nextBytes(pbrValue); + ByteArray arr = BigArrays.NON_RECYCLING_INSTANCE.newByteArray(pbrValue.length); + arr.set(0L, pbrValue, 0, pbrValue.length); + assert 
!arr.hasArray(); + BytesReference pbr = BytesReference.fromByteArray(arr, pbrValue.length); + serialized = ser.serialize(pbr); + assertTrue(ser.equals(pbr, serialized)); + deserialized = ser.deserialize(serialized); + assertEquals(pbr, deserialized); + + BytesReference rbr = new ReleasableBytesReference(new BytesArray(bytesValue), ReleasableBytesReference.NO_OP); + serialized = ser.serialize(rbr); + assertTrue(ser.equals(rbr, serialized)); + deserialized = ser.deserialize(serialized); + assertEquals(rbr, deserialized); + } +} From 3777e3f6ef87704f2aa21a0149e265fb6e800b8c Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Wed, 14 Feb 2024 10:52:07 -0800 Subject: [PATCH 16/32] Changed SingleDimensionCacheStats to use ConcurrentMap Signed-off-by: Peter Alfonsi --- .../stats/SingleDimensionCacheStats.java | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java index 0fb0c00f1a3cd..b0cc183a78bc7 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java @@ -17,6 +17,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; /** * A CacheStats implementation for caches that aggregate over a single dimension. 
@@ -24,11 +26,11 @@ */ public class SingleDimensionCacheStats implements CacheStats { // Maintain a counter metric for each shard id (dimension values) - private final Map hitsMap; - private final Map missesMap; - private final Map evictionsMap; - private final Map memorySizeMap; - private final Map entriesMap; + private final ConcurrentMap hitsMap; + private final ConcurrentMap missesMap; + private final ConcurrentMap evictionsMap; + private final ConcurrentMap memorySizeMap; + private final ConcurrentMap entriesMap; // Also maintain a single total counter metric, to avoid having to sum over many values for shards private final CounterMetric totalHits; @@ -41,11 +43,11 @@ public class SingleDimensionCacheStats implements CacheStats { private final String allowedDimensionName; public SingleDimensionCacheStats(String allowedDimensionName) { - this.hitsMap = new HashMap<>(); - this.missesMap = new HashMap<>(); - this.evictionsMap = new HashMap<>(); - this.memorySizeMap = new HashMap<>(); - this.entriesMap = new HashMap<>(); + this.hitsMap = new ConcurrentHashMap<>(); + this.missesMap = new ConcurrentHashMap<>(); + this.evictionsMap = new ConcurrentHashMap<>(); + this.memorySizeMap = new ConcurrentHashMap<>(); + this.entriesMap = new ConcurrentHashMap<>(); this.totalHits = new CounterMetric(); this.totalMisses = new CounterMetric(); @@ -212,8 +214,8 @@ private Map convertCounterMapToLong(Map inp return result; } - private Map convertLongMapToCounterMetric(Map inputMap) { - Map result = new HashMap<>(); + private ConcurrentMap convertLongMapToCounterMetric(Map inputMap) { + ConcurrentMap result = new ConcurrentHashMap<>(); for (String key: inputMap.keySet()) { CounterMetric counter = new CounterMetric(); counter.inc(inputMap.get(key)); From 605968065d040fecc4284399ba6d64878b2b09d6 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Wed, 14 Feb 2024 15:02:23 -0800 Subject: [PATCH 17/32] Made SingleDimensionCacheStats also take in tier dimensions Signed-off-by: Peter 
Alfonsi --- .../cache/store/disk/EhcacheDiskCache.java | 3 +- .../store/disk/EhCacheDiskCacheTests.java | 30 ++++++++ .../cache/stats/CacheStatsDimension.java | 4 ++ .../stats/SingleDimensionCacheStats.java | 69 +++++++++++++++---- .../cache/store/OpenSearchOnHeapCache.java | 3 +- .../stats/SingleDimensionCacheStatsTests.java | 36 ++++++++-- 6 files changed, 124 insertions(+), 21 deletions(-) diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 44c272058733a..930a1c2cd9bf1 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -24,6 +24,7 @@ import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; import org.opensearch.common.cache.ICacheKey; +import org.opensearch.common.cache.stats.CacheStatsDimension; import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.enums.CacheStoreType; @@ -148,7 +149,7 @@ private EhcacheDiskCache(Builder builder) { this.valueSerializer); this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); this.shardIdDimensionName = Objects.requireNonNull(builder.shardIdDimensionName, "Dimension name can't be null"); - this.stats = new SingleDimensionCacheStats(shardIdDimensionName); + this.stats = new SingleDimensionCacheStats(shardIdDimensionName, CacheStatsDimension.TIER_DIMENSION_VALUE_DISK); } private Cache buildCache(Duration expireAfterAccess, Builder builder) { diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java 
index b00436b5c1764..76f675486d362 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -607,6 +607,36 @@ public void testMemoryTracking() throws Exception { } } + public void testGetStatsByTierName() throws Exception { + Settings settings = Settings.builder().build(); + MockRemovalListener mockRemovalListener = new MockRemovalListener<>(); + ToLongBiFunction, String> weigher = getWeigher(); + try (NodeEnvironment env = newNodeEnvironment(settings)) { + ICache ehcacheTest = new EhcacheDiskCache.Builder().setThreadPoolAlias("ehcacheTest") + .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") + .setKeyType(String.class) + .setValueType(String.class) + .setKeySerializer(new StringSerializer()) + .setValueSerializer(new StringSerializer()) + .setShardIdDimensionName(dimensionName) + .setCacheType(CacheType.INDICES_REQUEST_CACHE) + .setSettings(settings) + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) + .setRemovalListener(mockRemovalListener) + .setWeigher(weigher) + .build(); + int randomKeys = randomIntBetween(10, 100); + for (int i = 0; i < randomKeys; i++) { + ehcacheTest.put(getICacheKey(UUID.randomUUID().toString()), UUID.randomUUID().toString()); + } + assertEquals(randomKeys, ehcacheTest.stats().getEntriesByDimensions(List.of(new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, CacheStatsDimension.TIER_DIMENSION_VALUE_DISK)))); + assertEquals(0, ehcacheTest.stats().getEntriesByDimensions(List.of(new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, CacheStatsDimension.TIER_DIMENSION_VALUE_ON_HEAP)))); + + ehcacheTest.close(); + } + } + private static String generateRandomString(int length) { String characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; StringBuilder randomString = new 
StringBuilder(length); diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java index 93956fbeb42f4..4abdbff5d5a4a 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java @@ -16,6 +16,10 @@ import java.util.Objects; public class CacheStatsDimension implements Writeable { + // Values for tier dimensions, that are reused across CacheStats implementations + public static final String TIER_DIMENSION_NAME = "tier"; + public static final String TIER_DIMENSION_VALUE_ON_HEAP = "on_heap"; + public static final String TIER_DIMENSION_VALUE_DISK = "disk"; public final String dimensionName; public final String dimensionValue; public CacheStatsDimension(String dimensionName, String dimensionValue) { diff --git a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java index b0cc183a78bc7..fc0856e227c73 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java @@ -14,6 +14,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -21,7 +22,7 @@ import java.util.concurrent.ConcurrentMap; /** - * A CacheStats implementation for caches that aggregate over a single dimension. + * A CacheStats implementation for caches that aggregate over a single dimension, as well as holding a tier dimension. * For example, caches in the IndicesRequestCache only aggregate over ShardId value. 
*/ public class SingleDimensionCacheStats implements CacheStats { @@ -39,10 +40,12 @@ public class SingleDimensionCacheStats implements CacheStats { private final CounterMetric totalMemorySize; private final CounterMetric totalEntries; - // The allowed dimension name. This stats only allows a single dimension name - private final String allowedDimensionName; + // The allowed dimension name. This stats only allows a single dimension name. Package-private for testing. + final String allowedDimensionName; + // The value of the tier dimension for entries in this Stats object. Package-private for testing. + final String tierDimensionValue; - public SingleDimensionCacheStats(String allowedDimensionName) { + public SingleDimensionCacheStats(String allowedDimensionName, String tierDimensionValue) { this.hitsMap = new ConcurrentHashMap<>(); this.missesMap = new ConcurrentHashMap<>(); this.evictionsMap = new ConcurrentHashMap<>(); @@ -56,6 +59,7 @@ public SingleDimensionCacheStats(String allowedDimensionName) { this.totalEntries = new CounterMetric(); this.allowedDimensionName = allowedDimensionName; + this.tierDimensionValue = tierDimensionValue; } public SingleDimensionCacheStats(StreamInput in) throws IOException { @@ -77,6 +81,7 @@ public SingleDimensionCacheStats(StreamInput in) throws IOException { totalEntries.inc(in.readVLong()); this.allowedDimensionName = in.readString(); + this.tierDimensionValue = in.readString(); } @Override @@ -104,7 +109,34 @@ public long getTotalEntries() { return this.totalEntries.count(); } - private long internalGetByDimension(List dimensions, Map metricsMap) { + private long internalGetByDimension(List dimensions, Map metricsMap, CounterMetric totalMetric) { + CacheStatsDimension tierDimension = getTierDimensionIfPresent(dimensions); + if (tierDimension != null) { + // This get request includes a tier dimension. 
Return values only if the tier dimension value + // matches the one for this stats object, otherwise return 0 + assert dimensions.size() == 1 || dimensions.size() == 2; // There can be at most one non-tier dimension value + if (tierDimension.dimensionValue.equals(tierDimensionValue)) { + // The list passed in may not be mutable; create a mutable copy to remove the tier dimension + ArrayList modifiedDimensions = new ArrayList<>(dimensions); + modifiedDimensions.remove(tierDimension); + + if (modifiedDimensions.size() == 1){ + return internalGetHelper(modifiedDimensions, metricsMap); + } else { + return totalMetric.count(); + } + + } else { + // Return 0 for incorrect tier value + return 0; + } + } else { + // This get request doesn't include a tier dimension. Return the appropriate values. + return internalGetHelper(dimensions, metricsMap); + } + } + + private long internalGetHelper(List dimensions, Map metricsMap) { assert dimensions.size() == 1; CounterMetric counter = metricsMap.get(dimensions.get(0).dimensionValue); if (counter == null) { @@ -113,29 +145,41 @@ private long internalGetByDimension(List dimensions, Map dimensions) { + for (CacheStatsDimension dim : dimensions) { + if (dim.dimensionName.equals(CacheStatsDimension.TIER_DIMENSION_NAME)) { + return dim; + } + } + return null; + } + @Override public long getHitsByDimensions(List dimensions) { - return internalGetByDimension(dimensions, hitsMap); + return internalGetByDimension(dimensions, hitsMap, totalHits); } @Override public long getMissesByDimensions(List dimensions) { - return internalGetByDimension(dimensions, missesMap); + return internalGetByDimension(dimensions, missesMap, totalMisses); } @Override public long getEvictionsByDimensions(List dimensions) { - return internalGetByDimension(dimensions, evictionsMap); + return internalGetByDimension(dimensions, evictionsMap, totalEvictions); } @Override public long getMemorySizeByDimensions(List dimensions) { - return 
internalGetByDimension(dimensions, memorySizeMap); + return internalGetByDimension(dimensions, memorySizeMap, totalMemorySize); } @Override public long getEntriesByDimensions(List dimensions) { - return internalGetByDimension(dimensions, entriesMap); + return internalGetByDimension(dimensions, entriesMap, totalEntries); } private boolean checkDimensionList(List dimensions) { @@ -199,10 +243,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(totalEntries.count()); out.writeString(allowedDimensionName); - } - - public String getAllowedDimensionName() { - return allowedDimensionName; + out.writeString(tierDimensionValue); } // For converting to StreamOutput/StreamInput, write maps of longs rather than CounterMetrics which don't support writing diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index d1273b12813c6..59763c8442949 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -17,6 +17,7 @@ import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; import org.opensearch.common.cache.ICacheKey; +import org.opensearch.common.cache.stats.CacheStatsDimension; import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.settings.Settings; @@ -49,7 +50,7 @@ public OpenSearchOnHeapCache(Builder builder) { } cache = cacheBuilder.build(); String dimensionName = Objects.requireNonNull(builder.shardIdDimensionName, "Shard id dimension name can't be null"); - this.stats = new SingleDimensionCacheStats(dimensionName); + this.stats = new SingleDimensionCacheStats(dimensionName, CacheStatsDimension.TIER_DIMENSION_VALUE_ON_HEAP); this.removalListener = 
builder.getRemovalListener(); } diff --git a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java index 2e0cb56fda3c9..89c1e3453e640 100644 --- a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java +++ b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java @@ -22,8 +22,9 @@ public class SingleDimensionCacheStatsTests extends OpenSearchTestCase { private final String dimensionName = "shardId"; + private final String tierName = "test_tier"; public void testAddAndGet() throws Exception { - StatsAndExpectedResults statsAndExpectedResults = getPopulatedStats(); + StatsAndExpectedResults statsAndExpectedResults = getPopulatedStats(tierName); SingleDimensionCacheStats stats = statsAndExpectedResults.stats; checkShardResults(statsAndExpectedResults); @@ -32,10 +33,34 @@ public void testAddAndGet() throws Exception { // Check values returned for a nonexistent dimension value or name return 0 assertEquals(0, stats.getHitsByDimensions(List.of(new CacheStatsDimension(dimensionName, "nonexistent")))); assertEquals(0, stats.getHitsByDimensions(List.of(new CacheStatsDimension("nonexistentName", "nonexistentValue")))); + + // Check sending too many values causes an assertion error + assertThrows(AssertionError.class, () -> stats.getHitsByDimensions(List.of(getDim(0), new CacheStatsDimension("test", "value")))); + } + + public void testTierFiltering() throws Exception { + StatsAndExpectedResults statsAndExpectedResults = getPopulatedStats(tierName); + SingleDimensionCacheStats stats = statsAndExpectedResults.stats; + + // Values should be returned if the tier dimension value matches the one passed to SingleDimensionCacheStats. Otherwise we should get 0. 
+ CacheStatsDimension matchingTierDim = new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, tierName); + CacheStatsDimension nonMatchingTierDim = new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, "another_tier"); + + assertEquals(stats.getTotalHits(), stats.getHitsByDimensions(List.of(matchingTierDim))); + assertEquals(0, stats.getHitsByDimensions(List.of(nonMatchingTierDim))); + for (int i = 0; i < statsAndExpectedResults.numShardIds; i++) { + assertEquals(stats.getHitsByDimensions(List.of(getDim(i))), stats.getHitsByDimensions(List.of(getDim(i), matchingTierDim))); + assertEquals(stats.getHitsByDimensions(List.of(getDim(i))), stats.getHitsByDimensions(List.of(matchingTierDim, getDim(i)))); + assertEquals(0, stats.getHitsByDimensions(List.of(getDim(i), nonMatchingTierDim))); + assertEquals(0, stats.getHitsByDimensions(List.of(nonMatchingTierDim, getDim(i)))); + + } + // Check sending too many values causes an assertion error + assertThrows(AssertionError.class, () -> stats.getHitsByDimensions(List.of(getDim(0), matchingTierDim, new CacheStatsDimension("test", "value")))); } public void testSerialization() throws Exception { - StatsAndExpectedResults statsAndExpectedResults = getPopulatedStats(); + StatsAndExpectedResults statsAndExpectedResults = getPopulatedStats(tierName); SingleDimensionCacheStats stats = statsAndExpectedResults.stats; Map> expectedResults = statsAndExpectedResults.expectedShardResults; @@ -47,7 +72,8 @@ public void testSerialization() throws Exception { StatsAndExpectedResults deserializedStatsAndExpectedResults = new StatsAndExpectedResults(deserialized, expectedResults, statsAndExpectedResults.numShardIds); checkShardResults(deserializedStatsAndExpectedResults); checkTotalResults(deserializedStatsAndExpectedResults); - assertEquals(deserialized.getAllowedDimensionName(), stats.getAllowedDimensionName()); + assertEquals(deserialized.allowedDimensionName, stats.allowedDimensionName); + 
assertEquals(deserialized.tierDimensionValue, stats.tierDimensionValue); } private CacheStatsDimension getDim(int i) { @@ -68,8 +94,8 @@ private long sumMap(Map inputMap) { return result; } - private StatsAndExpectedResults getPopulatedStats() { - SingleDimensionCacheStats stats = new SingleDimensionCacheStats(dimensionName); + private StatsAndExpectedResults getPopulatedStats(String tierName) { + SingleDimensionCacheStats stats = new SingleDimensionCacheStats(dimensionName, tierName); int numShardIds = 10; Map expectedHits = new HashMap<>(); From 7f5a4550379331b41dc3ca48d4b8ea2fd5f19d0e Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Wed, 14 Feb 2024 15:51:41 -0800 Subject: [PATCH 18/32] Added overall CacheStatsResponse object packaging all 5 metrics Signed-off-by: Peter Alfonsi --- .../common/cache/stats/CacheStats.java | 25 ++++++++++++-- .../common/cache/stats/CacheStatsBase.java | 34 ------------------- .../cache/stats/CacheStatsResponse.java | 28 +++++++++++++++ .../stats/SingleDimensionCacheStats.java | 16 +++++++++ .../cache/tier/TieredSpilloverCacheStats.java | 11 ++++++ .../stats/SingleDimensionCacheStatsTests.java | 19 +++++++++++ 6 files changed, 97 insertions(+), 36 deletions(-) delete mode 100644 server/src/main/java/org/opensearch/common/cache/stats/CacheStatsBase.java create mode 100644 server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java index 3aebb49145128..9d53585708dd7 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java @@ -8,7 +8,7 @@ package org.opensearch.common.cache.stats; -import org.opensearch.core.xcontent.ToXContentFragment; +import org.opensearch.core.common.io.stream.Writeable; import java.util.List; @@ -18,7 +18,28 @@ * When updating stats, we 
take in the list of dimensions associated with the key/value pair that caused the update. * This allows us to aggregate stats by dimension when accessing them. */ -public interface CacheStats extends CacheStatsBase { // TODO: Make this extend ToXContentFragment too +public interface CacheStats extends Writeable { + + // Methods to get all 5 values at once, either in total or for a specific set of dimensions. + CacheStatsResponse getTotalStats(); + CacheStatsResponse getStatsByDimensions(List dimensions); + + // Methods to get total values. + long getTotalHits(); + long getTotalMisses(); + long getTotalEvictions(); + long getTotalMemorySize(); + long getTotalEntries(); + + // Methods to get values for a specific set of dimensions. + // Returns the sum of values for cache entries that match all dimensions in the list. + long getHitsByDimensions(List dimensions); + long getMissesByDimensions(List dimensions); + long getEvictionsByDimensions(List dimensions); + long getMemorySizeByDimensions(List dimensions); + long getEntriesByDimensions(List dimensions); + + void incrementHitsByDimensions(List dimensions); void incrementMissesByDimensions(List dimensions); void incrementEvictionsByDimensions(List dimensions); diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsBase.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsBase.java deleted file mode 100644 index 779bfba937e37..0000000000000 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsBase.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.common.cache.stats; - -import org.opensearch.core.common.io.stream.Writeable; - -import java.util.List; - -/** - * An interface for accessing stats for a specific cache. 
Stats can be gotten as totals, or only for specific - * dimension values. - */ -public interface CacheStatsBase extends Writeable { // TODO: Also extend ToXContentFragment - // Methods to get total values. - long getTotalHits(); - long getTotalMisses(); - long getTotalEvictions(); - long getTotalMemorySize(); - long getTotalEntries(); - - // Methods to get values for a specific set of dimensions. - // Returns the sum of values for cache entries that match all dimensions in the list. - long getHitsByDimensions(List dimensions); - long getMissesByDimensions(List dimensions); - long getEvictionsByDimensions(List dimensions); - long getMemorySizeByDimensions(List dimensions); - long getEntriesByDimensions(List dimensions); -} diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java new file mode 100644 index 0000000000000..af0614fe926f4 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java @@ -0,0 +1,28 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.stats; + +/** + * A class containing the 5 metrics tracked by a CacheStats object. + */ +public class CacheStatsResponse { // TODO: Make this extend ToXContent. 
+ public final long hits; + public final long misses; + public final long evictions; + public final long memorySize; + public final long entries; + + public CacheStatsResponse(long hits, long misses, long evictions, long memorySize, long entries) { + this.hits = hits; + this.misses = misses; + this.evictions = evictions; + this.memorySize = memorySize; + this.entries = entries; + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java index fc0856e227c73..9391e0049fd47 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java @@ -84,6 +84,22 @@ public SingleDimensionCacheStats(StreamInput in) throws IOException { this.tierDimensionValue = in.readString(); } + @Override + public CacheStatsResponse getTotalStats() { + return new CacheStatsResponse(getTotalHits(), getTotalMisses(), getTotalEvictions(), getTotalMemorySize(), getTotalEntries()); + } + + @Override + public CacheStatsResponse getStatsByDimensions(List dimensions) { + return new CacheStatsResponse( + getHitsByDimensions(dimensions), + getMissesByDimensions(dimensions), + getEvictionsByDimensions(dimensions), + getMemorySizeByDimensions(dimensions), + getEntriesByDimensions(dimensions) + ); + } + @Override public long getTotalHits() { return this.totalHits.count(); diff --git a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java index c86d53c8438f5..e8ccc0422a704 100644 --- a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java @@ -10,6 +10,7 @@ import org.opensearch.common.cache.stats.CacheStats; import 
org.opensearch.common.cache.stats.CacheStatsDimension; +import org.opensearch.common.cache.stats.CacheStatsResponse; import org.opensearch.core.common.io.stream.StreamOutput; import java.io.IOException; @@ -30,6 +31,16 @@ public void writeTo(StreamOutput out) throws IOException { } + @Override + public CacheStatsResponse getTotalStats() { + return null; + } + + @Override + public CacheStatsResponse getStatsByDimensions(List dimensions) { + return null; + } + @Override public long getTotalHits() { return 0; diff --git a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java index 89c1e3453e640..1c94ce9322622 100644 --- a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java +++ b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java @@ -170,11 +170,20 @@ private void checkShardResults(StatsAndExpectedResults statsAndExpectedResults) String shardIdString = String.valueOf(shardId); CacheStatsDimension dimension = getDim(shardId); + // Check the individual metric getters assertEquals((long) expectedResults.get("hits").get(shardIdString), stats.getHitsByDimensions(List.of(dimension))); assertEquals((long) expectedResults.get("misses").get(shardIdString), stats.getMissesByDimensions(List.of(dimension))); assertEquals((long) expectedResults.get("evictions").get(shardIdString), stats.getEvictionsByDimensions(List.of(dimension))); assertEquals((long) expectedResults.get("memory_size").get(shardIdString), stats.getMemorySizeByDimensions(List.of(dimension))); assertEquals((long) expectedResults.get("entries").get(shardIdString), stats.getEntriesByDimensions(List.of(dimension))); + + // Check the total metric getter + CacheStatsResponse response = stats.getStatsByDimensions(List.of(dimension)); + assertEquals((long) expectedResults.get("hits").get(shardIdString), 
response.hits); + assertEquals((long) expectedResults.get("misses").get(shardIdString), response.misses); + assertEquals((long) expectedResults.get("evictions").get(shardIdString), response.evictions); + assertEquals((long) expectedResults.get("memory_size").get(shardIdString), response.memorySize); + assertEquals((long) expectedResults.get("entries").get(shardIdString), response.entries); } } @@ -182,11 +191,21 @@ private void checkTotalResults(StatsAndExpectedResults statsAndExpectedResults) // check resulting total values are what we expect Map> expectedResults = statsAndExpectedResults.expectedShardResults; SingleDimensionCacheStats stats = statsAndExpectedResults.stats; + + // Check the individual metric getters assertEquals(sumMap(expectedResults.get("hits")), stats.getTotalHits()); assertEquals(sumMap(expectedResults.get("misses")), stats.getTotalMisses()); assertEquals(sumMap(expectedResults.get("evictions")), stats.getTotalEvictions()); assertEquals(sumMap(expectedResults.get("memory_size")), stats.getTotalMemorySize()); assertEquals(sumMap(expectedResults.get("entries")), stats.getTotalEntries()); + + // Check the total metric getter + CacheStatsResponse totalResponse = stats.getTotalStats(); + assertEquals(sumMap(expectedResults.get("hits")), totalResponse.hits); + assertEquals(sumMap(expectedResults.get("misses")), totalResponse.misses); + assertEquals(sumMap(expectedResults.get("evictions")), totalResponse.evictions); + assertEquals(sumMap(expectedResults.get("memory_size")), totalResponse.memorySize); + assertEquals(sumMap(expectedResults.get("entries")), totalResponse.entries); } // Convenience class to allow reusing setup code across tests From 36c600d9836d35d48d06eef602e0cbd97025bc8f Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Wed, 14 Feb 2024 15:52:07 -0800 Subject: [PATCH 19/32] Removed skeleton TSC stats implementation Signed-off-by: Peter Alfonsi --- .../stats/SingleDimensionCacheStats.java | 13 -- 
.../cache/tier/TieredSpilloverCacheStats.java | 123 ------------------ 2 files changed, 136 deletions(-) delete mode 100644 server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCacheStats.java diff --git a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java index 9391e0049fd47..63f8ab27e43c2 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java @@ -280,17 +280,4 @@ private ConcurrentMap convertLongMapToCounterMetric(Map dimensions) { - return null; - } - - @Override - public long getTotalHits() { - return 0; - } - - @Override - public long getTotalMisses() { - return 0; - } - - @Override - public long getTotalEvictions() { - return 0; - } - - @Override - public long getTotalMemorySize() { - return 0; - } - - @Override - public long getTotalEntries() { - return 0; - } - - @Override - public long getHitsByDimensions(List dimensions) { - return 0; - } - - @Override - public long getMissesByDimensions(List dimensions) { - return 0; - } - - @Override - public long getEvictionsByDimensions(List dimensions) { - return 0; - } - - @Override - public long getMemorySizeByDimensions(List dimensions) { - return 0; - } - - @Override - public long getEntriesByDimensions(List dimensions) { - return 0; - } - - @Override - public void incrementHitsByDimensions(List dimensions) { - - } - - @Override - public void incrementMissesByDimensions(List dimensions) { - - } - - @Override - public void incrementEvictionsByDimensions(List dimensions) { - - } - - @Override - public void incrementMemorySizeByDimensions(List dimensions, long amountBytes) { - - } - - @Override - public void incrementEntriesByDimensions(List dimensions) { - - } - - @Override - public void decrementEntriesByDimensions(List dimensions) { - - } -} 
From c8dc1b369a9cbd1970d2827fdfab09a55d8d56ea Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Mon, 26 Feb 2024 15:11:54 -0800 Subject: [PATCH 20/32] Modified factories to take new arguments Signed-off-by: Peter Alfonsi --- .../cache/store/disk/EhcacheDiskCache.java | 20 +++++++++ .../store/disk/EhCacheDiskCacheTests.java | 17 ++++---- .../cache/store/OpenSearchOnHeapCache.java | 9 ++-- .../cache/store/config/CacheConfig.java | 42 +++++++++++++++++++ 4 files changed, 75 insertions(+), 13 deletions(-) diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 85253b3538a7e..08729c54188a1 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -482,6 +482,7 @@ public void onEvent(CacheEvent, ? extends byte[]> event) this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), valueSerializer.deserialize(event.getOldValue()), RemovalReason.EVICTED)); stats.decrementEntriesByDimensions(event.getKey().dimensions); stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getOldValuePairSize(event)); + stats.incrementEvictionsByDimensions(event.getKey().dimensions); assert event.getNewValue() == null; break; case REMOVED: @@ -556,11 +557,30 @@ public EhcacheDiskCacheFactory() {} public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) { Map> settingList = EhcacheDiskCacheSettings.getSettingListForCacheType(cacheType); Settings settings = config.getSettings(); + + Serializer keySerializer = null; + try { + keySerializer = (Serializer) config.getKeySerializer(); + } catch (ClassCastException e) { + throw new IllegalArgumentException("EhcacheDiskCache requires a key serializer of type Serializer"); + } + + Serializer valueSerializer = null; + 
try { + valueSerializer = (Serializer) config.getValueSerializer(); + } catch (ClassCastException e) { + throw new IllegalArgumentException("EhcacheDiskCache requires a value serializer of type Serializer"); + } + return new Builder().setStoragePath((String) settingList.get(DISK_STORAGE_PATH_KEY).get(settings)) .setDiskCacheAlias((String) settingList.get(DISK_CACHE_ALIAS_KEY).get(settings)) .setCacheType(cacheType) .setKeyType((config.getKeyType())) .setValueType(config.getValueType()) + .setKeySerializer(keySerializer) + .setValueSerializer(valueSerializer) + .setShardIdDimensionName(config.getDimensionNames().get(0)) // TODO: Rework this to pass in whole list, once stats is changed + .setWeigher(config.getWeigher()) .setRemovalListener(config.getRemovalListener()) .setExpireAfterAccess((TimeValue) settingList.get(DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY).get(settings)) .setMaximumWeightInBytes((Long) settingList.get(DISK_MAX_SIZE_IN_BYTES_KEY).get(settings)) diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index eb37db4a0bd3d..a43ae63a1cf49 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -110,6 +110,10 @@ public void testBasicGetAndPutUsingFactory() throws IOException { new CacheConfig.Builder().setValueType(String.class) .setKeyType(String.class) .setRemovalListener(removalListener) + .setKeySerializer(new StringSerializer()) + .setValueSerializer(new StringSerializer()) + .setDimensionNames(List.of(dimensionName)) + .setWeigher(getWeigher()) .setSettings( Settings.builder() .put( @@ -316,19 +320,11 @@ public void testEvictions() throws Exception { String value = generateRandomString(100); // Trying to generate more than 100kb to cause evictions. 
- long sizeOfAttemptedAdds = 0; - long sizeOfAttemptedAddsValue = 0; for (int i = 0; i < 1000; i++) { String key = "Key" + i; ICacheKey iCacheKey = getICacheKey((key)); - sizeOfAttemptedAdds += weigher.applyAsLong(iCacheKey, value); ehcacheTest.put(iCacheKey, value); - } - /*System.out.println("Total size of attempted adds = " + sizeOfAttemptedAdds); - System.out.println("Total size of attempted adds (value only) = " + sizeOfAttemptedAddsValue); - System.out.println("Total memory size = " + ehcacheTest.stats().getTotalMemorySize());*/ - // TODO: Figure out why ehcache is evicting at ~30-40% of its max size rather than 100% (see commented out prints above) assertTrue(mockRemovalListener.onRemovalCount.get() > 0); assertEquals(660, ehcacheTest.stats().getTotalEvictions()); ehcacheTest.close(); @@ -539,8 +535,9 @@ public String load(ICacheKey key) throws Exception { } public void testMemoryTracking() throws Exception { - // This test leaks threads because of an issue in Ehcache: + // TODO: This test leaks threads because of an issue in Ehcache: // https://github.com/ehcache/ehcache3/issues/3204 + // Test all cases for EhCacheEventListener.onEvent and check stats memory usage is updated correctly Settings settings = Settings.builder().build(); ToLongBiFunction, String> weigher = getWeigher(); @@ -685,7 +682,7 @@ public void onRemoval(RemovalNotification, V> notification) { } } - private static class StringSerializer implements Serializer { + static class StringSerializer implements Serializer { private final Charset charset = StandardCharsets.UTF_8; @Override public byte[] serialize(String object) { diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index a77ef4653f951..e0b06406d8f6d 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ 
b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -142,9 +142,12 @@ public static class OpenSearchOnHeapCacheFactory implements Factory { public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) { Map> settingList = OpenSearchOnHeapCacheSettings.getSettingListForCacheType(cacheType); Settings settings = config.getSettings(); - return new Builder().setMaximumWeightInBytes( - ((ByteSizeValue) settingList.get(MAXIMUM_SIZE_IN_BYTES_KEY).get(settings)).getBytes() - ).setWeigher(config.getWeigher()).setRemovalListener(config.getRemovalListener()).build(); + return new Builder() + .setShardIdDimensionName(config.getDimensionNames().get(0)) //TODO: Make it accept >1 dimension names + .setMaximumWeightInBytes(((ByteSizeValue) settingList.get(MAXIMUM_SIZE_IN_BYTES_KEY).get(settings)).getBytes()) + .setWeigher(config.getWeigher()) + .setRemovalListener(config.getRemovalListener()) + .build(); } @Override diff --git a/server/src/main/java/org/opensearch/common/cache/store/config/CacheConfig.java b/server/src/main/java/org/opensearch/common/cache/store/config/CacheConfig.java index c837ee899a283..0bf325cdd5a86 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/config/CacheConfig.java +++ b/server/src/main/java/org/opensearch/common/cache/store/config/CacheConfig.java @@ -11,8 +11,10 @@ import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.ICacheKey; +import org.opensearch.common.cache.serializer.Serializer; import org.opensearch.common.settings.Settings; +import java.util.List; import java.util.function.ToLongBiFunction; /** @@ -42,12 +44,20 @@ public class CacheConfig { private final RemovalListener, V> removalListener; + private final Serializer keySerializer; + private final Serializer valueSerializer; + + private final List dimensionNames; + private CacheConfig(Builder builder) { this.keyType = 
builder.keyType; this.valueType = builder.valueType; this.settings = builder.settings; this.removalListener = builder.removalListener; this.weigher = builder.weigher; + this.keySerializer = builder.keySerializer; + this.valueSerializer = builder.valueSerializer; + this.dimensionNames = builder.dimensionNames; } public RemovalListener, V> getRemovalListener() { @@ -70,6 +80,18 @@ public ToLongBiFunction, V> getWeigher() { return weigher; } + public Serializer getKeySerializer() { + return keySerializer; + } + + public Serializer getValueSerializer() { + return valueSerializer; + } + + public List getDimensionNames() { + return dimensionNames; + } + /** * Builder class to build Cache config related parameters. * @param Type of key. @@ -86,6 +108,11 @@ public static class Builder { private ToLongBiFunction, V> weigher; + private Serializer keySerializer; + private Serializer valueSerializer; + + private List dimensionNames; + public Builder() {} public Builder setSettings(Settings settings) { @@ -113,6 +140,21 @@ public Builder setWeigher(ToLongBiFunction, V> weigher) { return this; } + public Builder setKeySerializer(Serializer keySerializer) { + this.keySerializer = keySerializer; + return this; + } + + public Builder setValueSerializer(Serializer valueSerializer) { + this.valueSerializer = valueSerializer; + return this; + } + + public Builder setDimensionNames(List dimensionNames) { + this.dimensionNames = dimensionNames; + return this; + } + public CacheConfig build() { return new CacheConfig<>(this); } From 4abd6027cceb05751568970b951ffb57326b92ef Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Mon, 26 Feb 2024 16:04:37 -0800 Subject: [PATCH 21/32] added utility fns to CacheStatsResponse Signed-off-by: Peter Alfonsi --- .../cache/stats/CacheStatsResponse.java | 52 +++++++++++++++++-- 1 file changed, 47 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java
b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java index af0614fe926f4..7301a999b7314 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java @@ -8,15 +8,17 @@ package org.opensearch.common.cache.stats; +import java.util.Objects; + /** * A class containing the 5 metrics tracked by a CacheStats object. */ public class CacheStatsResponse { // TODO: Make this extend ToXContent. - public final long hits; - public final long misses; - public final long evictions; - public final long memorySize; - public final long entries; + public long hits; + public long misses; + public long evictions; + public long memorySize; + public long entries; public CacheStatsResponse(long hits, long misses, long evictions, long memorySize, long entries) { this.hits = hits; @@ -25,4 +27,44 @@ public CacheStatsResponse(long hits, long misses, long evictions, long memorySiz this.memorySize = memorySize; this.entries = entries; } + + public CacheStatsResponse() { + this.hits = 0; + this.misses = 0; + this.evictions = 0; + this.memorySize = 0; + this.entries = 0; + } + + public void add(CacheStatsResponse other) { + if (other == null) { + return; + } + this.hits += other.hits; + this.misses += other.misses; + this.evictions += other.evictions; + this.memorySize += other.memorySize; + this.entries += other.entries; + } + + @Override + public boolean equals(Object o) { + if (o == null) { + return false; + } + if (o.getClass() != CacheStatsResponse.class) { + return false; + } + CacheStatsResponse other = (CacheStatsResponse) o; + return (hits == other.hits) + && (misses == other.misses) + && (evictions == other.evictions) + && (memorySize == other.memorySize) + && (entries == other.entries); + } + + @Override + public int hashCode() { + return Objects.hash(hits, misses, evictions, memorySize, entries); + } } From 
2586fa1f82332e7b649428662e0c57c68c6a7e85 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Tue, 27 Feb 2024 10:23:32 -0800 Subject: [PATCH 22/32] Making TieredCachePlugin constructor public Signed-off-by: Sagar Upadhyaya --- .../cache/common/tier/TieredSpilloverCachePlugin.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java index 19abf8ae63c28..6b0620c5fbede 100644 --- a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java +++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java @@ -31,7 +31,7 @@ public class TieredSpilloverCachePlugin extends Plugin implements CachePlugin { /** * Default constructor */ - TieredSpilloverCachePlugin() {} + public TieredSpilloverCachePlugin() {} @Override public Map getCacheFactoryMap() { From 6a2b374acfe737e34eb41cdc842496170f5c1932 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Tue, 27 Feb 2024 12:10:32 -0800 Subject: [PATCH 23/32] Fixing CacheService unit test Signed-off-by: Sagar Upadhyaya --- .../opensearch/common/cache/service/CacheServiceTests.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/opensearch/common/cache/service/CacheServiceTests.java b/server/src/test/java/org/opensearch/common/cache/service/CacheServiceTests.java index 9b821a3b2a9cb..9d39f8a43ea58 100644 --- a/server/src/test/java/org/opensearch/common/cache/service/CacheServiceTests.java +++ b/server/src/test/java/org/opensearch/common/cache/service/CacheServiceTests.java @@ -23,6 +23,8 @@ import java.util.Map; import static junit.framework.TestCase.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static 
org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -42,7 +44,7 @@ public void testWithCreateCacheForIndicesRequestCacheType() { ); CacheConfig config = mock(CacheConfig.class); ICache onHeapCache = mock(OpenSearchOnHeapCache.class); - when(factory1.create(config, CacheType.INDICES_REQUEST_CACHE, factoryMap)).thenReturn(onHeapCache); + when(factory1.create(eq(config), eq(CacheType.INDICES_REQUEST_CACHE), any(Map.class))).thenReturn(onHeapCache); CacheService cacheService = cacheModule.getCacheService(); ICache ircCache = cacheService.createCache(config, CacheType.INDICES_REQUEST_CACHE); From 3e7ea262d4677b3949af1607f923f75bb1dee661 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Tue, 27 Feb 2024 13:46:52 -0800 Subject: [PATCH 24/32] Added multi dimension cache stats impl Signed-off-by: Peter Alfonsi --- .../cache/store/disk/EhcacheDiskCache.java | 15 +- .../store/disk/EhCacheDiskCacheTests.java | 20 +- .../cache/stats/CacheStatsResponse.java | 96 +++++-- .../cache/stats/MultiDimensionCacheStats.java | 252 ++++++++++++++++++ .../cache/store/OpenSearchOnHeapCache.java | 14 +- .../stats/MultiDimensionCacheStatsTests.java | 228 ++++++++++++++++ 6 files changed, 574 insertions(+), 51 deletions(-) create mode 100644 server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java create mode 100644 server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 08729c54188a1..372a2993ecb18 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -25,6 +25,7 @@ import org.opensearch.common.cache.stats.CacheStats; import org.opensearch.common.cache.ICacheKey; import 
org.opensearch.common.cache.stats.CacheStatsDimension; +import org.opensearch.common.cache.stats.MultiDimensionCacheStats; import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.serializer.ICacheKeySerializer; @@ -39,6 +40,7 @@ import java.nio.ByteBuffer; import java.time.Duration; import java.util.Iterator; +import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Objects; @@ -110,7 +112,6 @@ public class EhcacheDiskCache implements ICache { private final Settings settings; private final CacheType cacheType; private final String diskCacheAlias; - private final String shardIdDimensionName; private final Serializer keySerializer; private final Serializer valueSerializer; @@ -153,8 +154,8 @@ private EhcacheDiskCache(Builder builder) { Objects.requireNonNull(builder.getWeigher(), "Weigher function can't be null"), this.valueSerializer); this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); - this.shardIdDimensionName = Objects.requireNonNull(builder.shardIdDimensionName, "Dimension name can't be null"); - this.stats = new SingleDimensionCacheStats(shardIdDimensionName, CacheStatsDimension.TIER_DIMENSION_VALUE_DISK); + List dimensionNames = Objects.requireNonNull(builder.dimensionNames, "Dimension names can't be null"); + this.stats = new MultiDimensionCacheStats(dimensionNames, CacheStatsDimension.TIER_DIMENSION_VALUE_DISK); } private Cache buildCache(Duration expireAfterAccess, Builder builder) { @@ -579,7 +580,7 @@ public ICache create(CacheConfig config, CacheType cacheType, .setValueType(config.getValueType()) .setKeySerializer(keySerializer) .setValueSerializer(valueSerializer) - .setShardIdDimensionName(config.getDimensionNames().get(0)) // TODO: Rework this to pass in whole list, once stats is changed + .setDimensionNames(config.getDimensionNames()) 
.setWeigher(config.getWeigher()) .setRemovalListener(config.getRemovalListener()) .setExpireAfterAccess((TimeValue) settingList.get(DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY).get(settings)) @@ -609,7 +610,7 @@ public static class Builder extends ICacheBuilder { private boolean isEventListenerModeSync; private Class keyType; private Class valueType; - private String shardIdDimensionName; + private List dimensionNames; private Serializer keySerializer; private Serializer valueSerializer; @@ -688,8 +689,8 @@ public Builder setIsEventListenerModeSync(boolean isEventListenerModeSync) return this; } - public Builder setShardIdDimensionName(String dimensionName) { - this.shardIdDimensionName = dimensionName; + public Builder setDimensionNames(List dimensionNames) { + this.dimensionNames = dimensionNames; return this; } diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index a43ae63a1cf49..337fe75c67131 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -59,7 +59,7 @@ public void testBasicGetAndPut() throws IOException { .setValueType(String.class) .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) - .setShardIdDimensionName(dimensionName) + .setDimensionNames(List.of(dimensionName)) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -169,7 +169,7 @@ public void testConcurrentPut() throws Exception { .setValueType(String.class) .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) - .setShardIdDimensionName(dimensionName) + .setDimensionNames(List.of(dimensionName)) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) 
.setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -218,7 +218,7 @@ public void testEhcacheParallelGets() throws Exception { .setValueType(String.class) .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) - .setShardIdDimensionName(dimensionName) + .setDimensionNames(List.of(dimensionName)) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -265,7 +265,7 @@ public void testEhcacheKeyIterator() throws Exception { .setValueType(String.class) .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) - .setShardIdDimensionName(dimensionName) + .setDimensionNames(List.of(dimensionName)) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -307,7 +307,7 @@ public void testEvictions() throws Exception { .setValueType(String.class) .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) - .setShardIdDimensionName(dimensionName) + .setDimensionNames(List.of(dimensionName)) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -343,7 +343,7 @@ public void testComputeIfAbsentConcurrently() throws Exception { .setValueType(String.class) .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) - .setShardIdDimensionName(dimensionName) + .setDimensionNames(List.of(dimensionName)) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -419,7 +419,7 @@ public void testComputeIfAbsentConcurrentlyAndThrowsException() throws Exception .setValueType(String.class) .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) - .setShardIdDimensionName(dimensionName) + .setDimensionNames(List.of(dimensionName)) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) 
.setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -480,7 +480,7 @@ public void testComputeIfAbsentWithNullValueLoading() throws Exception { .setValueType(String.class) .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) - .setShardIdDimensionName(dimensionName) + .setDimensionNames(List.of(dimensionName)) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) @@ -553,7 +553,7 @@ public void testMemoryTracking() throws Exception { .setValueType(String.class) .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) - .setShardIdDimensionName(dimensionName) + .setDimensionNames(List.of(dimensionName)) .setIsEventListenerModeSync(true) // Test fails if async; probably not all updates happen before checking stats .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) @@ -620,7 +620,7 @@ public void testGetStatsByTierName() throws Exception { .setValueType(String.class) .setKeySerializer(new StringSerializer()) .setValueSerializer(new StringSerializer()) - .setShardIdDimensionName(dimensionName) + .setDimensionNames(List.of(dimensionName)) .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java index 7301a999b7314..fededa326d3e5 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java @@ -8,43 +8,54 @@ package org.opensearch.common.cache.stats; +import org.opensearch.common.metrics.CounterMetric; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; + +import java.io.IOException; 
import java.util.Objects; /** * A class containing the 5 metrics tracked by a CacheStats object. */ -public class CacheStatsResponse { // TODO: Make this extend ToXContent. - public long hits; - public long misses; - public long evictions; - public long memorySize; - public long entries; +public class CacheStatsResponse implements Writeable { // TODO: Make this extend ToXContent. + public CounterMetric hits; + public CounterMetric misses; + public CounterMetric evictions; + public CounterMetric memorySize; + public CounterMetric entries; public CacheStatsResponse(long hits, long misses, long evictions, long memorySize, long entries) { - this.hits = hits; - this.misses = misses; - this.evictions = evictions; - this.memorySize = memorySize; - this.entries = entries; + this.hits = new CounterMetric(); + this.hits.inc(hits); + this.misses = new CounterMetric(); + this.misses.inc(misses); + this.evictions = new CounterMetric(); + this.evictions.inc(evictions); + this.memorySize = new CounterMetric(); + this.memorySize.inc(memorySize); + this.entries = new CounterMetric(); + this.entries.inc(entries); + } + + public CacheStatsResponse(StreamInput in) throws IOException { + this(in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong()); } public CacheStatsResponse() { - this.hits = 0; - this.misses = 0; - this.evictions = 0; - this.memorySize = 0; - this.entries = 0; + this(0,0,0,0,0); } - public void add(CacheStatsResponse other) { + public synchronized void add(CacheStatsResponse other) { if (other == null) { return; } - this.hits += other.hits; - this.misses += other.misses; - this.evictions += other.evictions; - this.memorySize += other.memorySize; - this.entries += other.entries; + this.hits.inc(other.hits.count()); + this.misses.inc(other.misses.count()); + this.evictions.inc(other.evictions.count()); + this.memorySize.inc(other.memorySize.count()); + this.entries.inc(other.entries.count()); } @Override @@ -56,15 +67,44 @@ public boolean 
equals(Object o) { return false; } CacheStatsResponse other = (CacheStatsResponse) o; - return (hits == other.hits) - && (misses == other.misses) - && (evictions == other.evictions) - && (memorySize == other.memorySize) - && (entries == other.entries); + return (hits.count() == other.hits.count()) + && (misses.count() == other.misses.count()) + && (evictions.count() == other.evictions.count()) + && (memorySize.count() == other.memorySize.count()) + && (entries.count() == other.entries.count()); } @Override public int hashCode() { - return Objects.hash(hits, misses, evictions, memorySize, entries); + return Objects.hash(hits.count(), misses.count(), evictions.count(), memorySize.count(), entries.count()); + } + + public long getHits() { + return hits.count(); + } + + public long getMisses() { + return misses.count(); + } + + public long getEvictions() { + return evictions.count(); + } + + public long getMemorySize() { + return memorySize.count(); + } + + public long getEntries() { + return entries.count(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVLong(hits.count()); + out.writeVLong(misses.count()); + out.writeVLong(evictions.count()); + out.writeVLong(memorySize.count()); + out.writeVLong(entries.count()); } } diff --git a/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java new file mode 100644 index 0000000000000..ba356335c11b4 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java @@ -0,0 +1,252 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.stats; + +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.function.BiConsumer; + +/** + * A CacheStats object supporting multiple different dimensions. + * Also keeps track of a tier dimension, which is assumed to be the same for all values in the stats object. + * The tier dimension value should not be passed into the CacheStats API functions for updating values. + */ +public class MultiDimensionCacheStats implements CacheStats { + + /** + * For memory purposes, don't track stats for more than this many distinct combinations of dimension values. + */ + public final static int DEFAULT_MAX_DIMENSION_VALUES = 20_000; + + // pkg-private for testing + final List dimensionNames; + + // The value of the tier dimension for entries in this Stats object. This is handled separately for efficiency, + // as it always has the same value for every entry in the stats object. + // Package-private for testing. + final String tierDimensionValue; + + // A map from a set of cache stats dimensions -> stats for that combination of dimensions. Does not include the tier dimension in its keys. 
+ final ConcurrentMap, CacheStatsResponse> map; + + final int maxDimensionValues; + CacheStatsResponse totalStats; + + public MultiDimensionCacheStats(List dimensionNames, String tierDimensionValue, int maxDimensionValues) { + this.dimensionNames = dimensionNames; + this.map = new ConcurrentHashMap<>(); + this.totalStats = new CacheStatsResponse(); + this.tierDimensionValue = tierDimensionValue; + this.maxDimensionValues = maxDimensionValues; + } + + public MultiDimensionCacheStats(List dimensionNames, String tierDimensionValue) { + this(dimensionNames, tierDimensionValue, DEFAULT_MAX_DIMENSION_VALUES); + } + + public MultiDimensionCacheStats(StreamInput in) throws IOException { + this.dimensionNames = List.of(in.readStringArray()); + this.tierDimensionValue = in.readString(); + Map, CacheStatsResponse> readMap = in.readMap( + i -> Set.of(i.readArray(CacheStatsDimension::new, CacheStatsDimension[]::new)), + CacheStatsResponse::new + ); + this.map = new ConcurrentHashMap, CacheStatsResponse>(readMap); + this.totalStats = new CacheStatsResponse(in); + this.maxDimensionValues = in.readVInt(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeStringArray(dimensionNames.toArray(new String[0])); + out.writeString(tierDimensionValue); + out.writeMap( + map, + (o, keySet) -> o.writeArray((o1, dim) -> ((CacheStatsDimension) dim).writeTo(o1), keySet.toArray()), + (o, response) -> response.writeTo(o) + ); + totalStats.writeTo(out); + out.writeVInt(maxDimensionValues); + } + + @Override + public CacheStatsResponse getTotalStats() { + return totalStats; + } + + @Override + public CacheStatsResponse getStatsByDimensions(List dimensions) { + if (!checkDimensionNames(dimensions)) { + throw new IllegalArgumentException("Can't get stats for unrecognized dimensions"); + } + + CacheStatsDimension tierDim = getTierDimension(dimensions); + if (tierDim == null || tierDim.dimensionValue.equals(tierDimensionValue)) { + // If there is no tier 
dimension, or if the tier dimension value matches the one for this stats object, return an aggregated response over the non-tier dimensions + List modifiedDimensions = new ArrayList<>(dimensions); + if (tierDim != null) { + modifiedDimensions.remove(tierDim); + } + + // I don't think there's a more efficient way to get arbitrary combinations of dimensions than to just keep a map + // and iterate through it, checking if keys match. We can't pre-aggregate because it would consume a lot of memory. + CacheStatsResponse response = new CacheStatsResponse(); + for (Set storedDimensions : map.keySet()) { + if (storedDimensions.containsAll(modifiedDimensions)) { + response.add(map.get(storedDimensions)); + } + } + return response; + } + // If the tier dimension doesn't match, return an all-zero response + return new CacheStatsResponse(); + } + + private CacheStatsDimension getTierDimension(List dimensions) { + for (CacheStatsDimension dim : dimensions) { + if (dim.dimensionName.equals(CacheStatsDimension.TIER_DIMENSION_NAME)) { + return dim; + } + } + return null; + } + + private boolean checkDimensionNames(List dimensions) { + for (CacheStatsDimension dim : dimensions) { + if (!dimensionNames.contains(dim.dimensionName) && !dim.dimensionName.equals(CacheStatsDimension.TIER_DIMENSION_NAME)) { + return false; + } + } + return true; + } + + private CacheStatsResponse getStatsBySingleDimension(CacheStatsDimension dimension) { + assert dimensionNames.size() == 1; + CacheStatsResponse response = new CacheStatsResponse(); + for (Set dimensions : map.keySet()) { + // Each set has only one element + for (CacheStatsDimension keyDimension : dimensions) { + if (keyDimension.dimensionValue.equals(dimension.dimensionValue)) { + response.add(map.get(dimensions)); + } + } + } + return response; + } + + @Override + public long getTotalHits() { + return totalStats.getHits(); + } + + @Override + public long getTotalMisses() { + return totalStats.getMisses(); + } + + @Override + public long 
getTotalEvictions() { + return totalStats.getEvictions(); + } + + @Override + public long getTotalMemorySize() { + return totalStats.getMemorySize(); + } + + @Override + public long getTotalEntries() { + return totalStats.getEntries(); + } + + @Override + public long getHitsByDimensions(List dimensions) { + return getStatsByDimensions(dimensions).getHits(); + } + + @Override + public long getMissesByDimensions(List dimensions) { + return getStatsByDimensions(dimensions).getMisses(); + } + + @Override + public long getEvictionsByDimensions(List dimensions) { + return getStatsByDimensions(dimensions).getEvictions(); + } + + @Override + public long getMemorySizeByDimensions(List dimensions) { + return getStatsByDimensions(dimensions).getMemorySize(); + } + + @Override + public long getEntriesByDimensions(List dimensions) { + return getStatsByDimensions(dimensions).getEntries(); + } + + @Override + public void incrementHitsByDimensions(List dimensions) { + internalIncrement(dimensions, (response, amount) -> response.hits.inc(amount), 1); + } + + @Override + public void incrementMissesByDimensions(List dimensions) { + internalIncrement(dimensions, (response, amount) -> response.misses.inc(amount), 1); + } + + @Override + public void incrementEvictionsByDimensions(List dimensions) { + internalIncrement(dimensions, (response, amount) -> response.evictions.inc(amount), 1); + } + + @Override + public void incrementMemorySizeByDimensions(List dimensions, long amountBytes) { + internalIncrement(dimensions, (response, amount) -> response.memorySize.inc(amount), amountBytes); + } + + @Override + public void incrementEntriesByDimensions(List dimensions) { + internalIncrement(dimensions, (response, amount) -> response.entries.inc(amount), 1); + } + + @Override + public void decrementEntriesByDimensions(List dimensions) { + internalIncrement(dimensions, (response, amount) -> response.entries.inc(amount), -1); + } + + private CacheStatsResponse internalGetStats(List dimensions) { + 
assert dimensions.size() == dimensionNames.size(); + CacheStatsResponse response = map.get(new HashSet<>(dimensions)); + if (response == null) { + if (map.size() < maxDimensionValues) { + response = new CacheStatsResponse(); + map.put(new HashSet<>(dimensions), response); + } else { + throw new RuntimeException("Cannot add new combination of dimension values to stats object; reached maximum"); + } + } + return response; + } + + private void internalIncrement(List dimensions, BiConsumer incrementer, long amount) { + CacheStatsResponse stats = internalGetStats(dimensions); + incrementer.accept(stats, amount); + incrementer.accept(totalStats, amount); + } + +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index e0b06406d8f6d..ff3274409e110 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -19,12 +19,14 @@ import org.opensearch.common.cache.stats.CacheStats; import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.stats.CacheStatsDimension; +import org.opensearch.common.cache.stats.MultiDimensionCacheStats; import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.config.CacheConfig; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; +import java.util.List; import java.util.Objects; import java.util.function.ToLongBiFunction; import org.opensearch.common.cache.store.builders.ICacheBuilder; @@ -58,8 +60,8 @@ public OpenSearchOnHeapCache(Builder builder) { cacheBuilder.setExpireAfterAccess(builder.getExpireAfterAcess()); } cache = cacheBuilder.build(); - String dimensionName = Objects.requireNonNull(builder.shardIdDimensionName, 
"Shard id dimension name can't be null"); - this.stats = new SingleDimensionCacheStats(dimensionName, CacheStatsDimension.TIER_DIMENSION_VALUE_ON_HEAP); + List dimensionNames = Objects.requireNonNull(builder.dimensionNames, "Dimension names can't be null"); + this.stats = new MultiDimensionCacheStats(dimensionNames, CacheStatsDimension.TIER_DIMENSION_VALUE_ON_HEAP); this.removalListener = builder.getRemovalListener(); } @@ -143,7 +145,7 @@ public ICache create(CacheConfig config, CacheType cacheType, Map> settingList = OpenSearchOnHeapCacheSettings.getSettingListForCacheType(cacheType); Settings settings = config.getSettings(); return new Builder() - .setShardIdDimensionName(config.getDimensionNames().get(0)) //TODO: Make it accept >1 dimension names + .setDimensionNames(config.getDimensionNames()) .setMaximumWeightInBytes(((ByteSizeValue) settingList.get(MAXIMUM_SIZE_IN_BYTES_KEY).get(settings)).getBytes()) .setWeigher(config.getWeigher()) .setRemovalListener(config.getRemovalListener()) @@ -163,10 +165,10 @@ public String getCacheName() { */ public static class Builder extends ICacheBuilder { - private String shardIdDimensionName; + private List dimensionNames; - public Builder setShardIdDimensionName(String dimensionName) { - this.shardIdDimensionName = dimensionName; + public Builder setDimensionNames(List dimensionNames) { + this.dimensionNames = dimensionNames; return this; } @Override diff --git a/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java new file mode 100644 index 0000000000000..127b5c979f27a --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java @@ -0,0 +1,228 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.stats; + +import org.opensearch.common.Randomness; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.UUID; + +public class MultiDimensionCacheStatsTests extends OpenSearchTestCase { + String tierDimensionValue = "tier"; + + public void testSerialization() throws Exception { + List dimensionNames = List.of("dim1", "dim2"); + MultiDimensionCacheStats stats = new MultiDimensionCacheStats(dimensionNames, tierDimensionValue); + Map> usedDimensionValues = getUsedDimensionValues(stats, 10); + populateStats(stats, usedDimensionValues, 100, 10); + + BytesStreamOutput os = new BytesStreamOutput(); + stats.writeTo(os); + BytesStreamInput is = new BytesStreamInput(BytesReference.toBytes(os.bytes())); + MultiDimensionCacheStats deserialized = new MultiDimensionCacheStats(is); + assertEquals(stats.map, deserialized.map); + assertEquals(stats.totalStats, deserialized.totalStats); + assertEquals(stats.dimensionNames, deserialized.dimensionNames); + } + + public void testAddAndGet() throws Exception { + List dimensionNames = List.of("dim1", "dim2", "dim3", "dim4"); + MultiDimensionCacheStats stats = new MultiDimensionCacheStats(dimensionNames, tierDimensionValue); + Map> usedDimensionValues = getUsedDimensionValues(stats, 10); + + Map, CacheStatsResponse> expected = populateStats(stats, usedDimensionValues, 1000, 10); + // test gets for each distinct combination of values + for (Set dimSet : expected.keySet()) { + List dims = new ArrayList<>(dimSet); + CacheStatsResponse expectedResponse = expected.get(dimSet); + CacheStatsResponse actual = 
stats.getStatsByDimensions(dims); + assertEquals(expectedResponse, actual); + + assertEquals(expectedResponse.getHits(), stats.getHitsByDimensions(dims)); + assertEquals(expectedResponse.getMisses(), stats.getMissesByDimensions(dims)); + assertEquals(expectedResponse.getEvictions(), stats.getEvictionsByDimensions(dims)); + assertEquals(expectedResponse.getMemorySize(), stats.getMemorySizeByDimensions(dims)); + assertEquals(expectedResponse.getEntries(), stats.getEntriesByDimensions(dims)); + } + + // test gets for aggregations of values: for example, dim1="a", dim2="b", but dim3 and dim4 can be anything + // test a random subset of these, there are combinatorially many possibilities + for (int i = 0; i < 1000; i++) { + List aggregationDims = getRandomDimList(stats.dimensionNames, usedDimensionValues, false, Randomness.get()); + CacheStatsResponse expectedResponse = new CacheStatsResponse(); + for (Set dimSet : expected.keySet()) { + if (dimSet.containsAll(aggregationDims)) { + // Confirmed via debug we get a reasonable number of matching dimensions with this setup + expectedResponse.add(expected.get(dimSet)); + } + } + assertEquals(expectedResponse, stats.getStatsByDimensions(aggregationDims)); + + assertEquals(expectedResponse.getHits(), stats.getHitsByDimensions(aggregationDims)); + assertEquals(expectedResponse.getMisses(), stats.getMissesByDimensions(aggregationDims)); + assertEquals(expectedResponse.getEvictions(), stats.getEvictionsByDimensions(aggregationDims)); + assertEquals(expectedResponse.getMemorySize(), stats.getMemorySizeByDimensions(aggregationDims)); + assertEquals(expectedResponse.getEntries(), stats.getEntriesByDimensions(aggregationDims)); + } + + // test gets for total + + CacheStatsResponse expectedTotal = new CacheStatsResponse(); + for (Set dimSet : expected.keySet()) { + expectedTotal.add(expected.get(dimSet)); + } + assertEquals(expectedTotal, stats.getTotalStats()); + + assertEquals(expectedTotal.getHits(), stats.getTotalHits()); + 
assertEquals(expectedTotal.getMisses(), stats.getTotalMisses()); + assertEquals(expectedTotal.getEvictions(), stats.getTotalEvictions()); + assertEquals(expectedTotal.getMemorySize(), stats.getTotalMemorySize()); + assertEquals(expectedTotal.getEntries(), stats.getTotalEntries()); + } + + public void testExceedsCap() throws Exception { + List dimensionNames = List.of("dim1", "dim2", "dim3", "dim4"); + MultiDimensionCacheStats stats = new MultiDimensionCacheStats(dimensionNames, tierDimensionValue, 1000); + Map> usedDimensionValues = getUsedDimensionValues(stats, 100); + + // Try a few more than MAX_DIMENSION_VALUES times because there can be collisions in the randomly selected dimension values + assertThrows(RuntimeException.class, () -> populateStats(stats, usedDimensionValues, (int) (stats.maxDimensionValues * 1.1), 10)); + } + + public void testEmptyDimsList() throws Exception { + // If the dimension list is empty, the map should have only one entry, from the empty set -> the total stats. 
+ MultiDimensionCacheStats stats = new MultiDimensionCacheStats(List.of(), tierDimensionValue); + Map> usedDimensionValues = getUsedDimensionValues(stats, 100); + populateStats(stats, usedDimensionValues, 10, 100); + assertEquals(stats.totalStats, stats.getStatsByDimensions(List.of())); + assertEquals(stats.getTotalHits(), stats.getHitsByDimensions(List.of())); + assertEquals(stats.getTotalMisses(), stats.getMissesByDimensions(List.of())); + assertEquals(stats.getTotalEvictions(), stats.getEvictionsByDimensions(List.of())); + assertEquals(stats.getTotalMemorySize(), stats.getMemorySizeByDimensions(List.of())); + assertEquals(stats.getTotalEntries(), stats.getEntriesByDimensions(List.of())); + assertEquals(1, stats.map.size()); + } + + public void testTierLogic() throws Exception { + List dimensionNames = List.of("dim1", "dim2", "dim3", "dim4"); + MultiDimensionCacheStats stats = new MultiDimensionCacheStats(dimensionNames, tierDimensionValue); + Map> usedDimensionValues = getUsedDimensionValues(stats, 10); + Map, CacheStatsResponse> expected = populateStats(stats, usedDimensionValues, 1000, 10); + + CacheStatsDimension tierDim = new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, tierDimensionValue); + CacheStatsDimension wrongTierDim = new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, "wrong_value"); + + for (int i = 0; i < 1000; i++) { + List aggregationDims = getRandomDimList(stats.dimensionNames, usedDimensionValues, false, Randomness.get()); + List aggDimsWithTier = new ArrayList<>(aggregationDims); + aggDimsWithTier.add(tierDim); + + List aggDimsWithWrongTier = new ArrayList<>(aggregationDims); + aggDimsWithWrongTier.add(wrongTierDim); + CacheStatsResponse expectedResponse = new CacheStatsResponse(); + for (Set dimSet : expected.keySet()) { + if (dimSet.containsAll(aggregationDims)) { + expectedResponse.add(expected.get(dimSet)); + } + } + assertEquals(expectedResponse, stats.getStatsByDimensions(aggregationDims)); + 
assertEquals(expectedResponse, stats.getStatsByDimensions(aggDimsWithTier)); + assertEquals(new CacheStatsResponse(), stats.getStatsByDimensions(aggDimsWithWrongTier)); + } + assertEquals(stats.getTotalStats(), stats.getStatsByDimensions(List.of(tierDim))); + assertEquals(new CacheStatsResponse(), stats.getStatsByDimensions(List.of(wrongTierDim))); + } + + private Map> getUsedDimensionValues(MultiDimensionCacheStats stats, int numValuesPerDim) { + Map> usedDimensionValues = new HashMap<>(); + for (int i = 0; i < stats.dimensionNames.size(); i++) { + List values = new ArrayList<>(); + for (int j = 0; j < numValuesPerDim; j++) { + values.add(UUID.randomUUID().toString()); + } + usedDimensionValues.put(stats.dimensionNames.get(i), values); + } + return usedDimensionValues; + } + + private Map, CacheStatsResponse> populateStats(MultiDimensionCacheStats stats, Map> usedDimensionValues, int numDistinctValuePairs, int numRepetitionsPerValue) { + Map, CacheStatsResponse> expected = new HashMap<>(); + + Random rand = Randomness.get(); + for (int i = 0; i < numDistinctValuePairs; i++) { + List dimensions = getRandomDimList(stats.dimensionNames, usedDimensionValues, true, rand); + Set dimSet = new HashSet<>(dimensions); + if (expected.get(dimSet) == null) { + expected.put(dimSet, new CacheStatsResponse()); + } + + for (int j = 0; j < numRepetitionsPerValue; j++) { + + int numHitIncrements = rand.nextInt(10); + for (int k = 0; k < numHitIncrements; k++) { + stats.incrementHitsByDimensions(dimensions); + expected.get(new HashSet<>(dimensions)).hits.inc(); + } + + int numMissIncrements = rand.nextInt(10); + for (int k = 0; k < numMissIncrements; k++) { + stats.incrementMissesByDimensions(dimensions); + expected.get(new HashSet<>(dimensions)).misses.inc(); + } + + int numEvictionIncrements = rand.nextInt(10); + for (int k = 0; k < numEvictionIncrements; k++) { + stats.incrementEvictionsByDimensions(dimensions); + expected.get(new HashSet<>(dimensions)).evictions.inc(); + } + + 
int numMemorySizeIncrements = rand.nextInt(10); + for (int k = 0; k < numMemorySizeIncrements; k++) { + long memIncrementAmount = rand.nextInt(5000); + stats.incrementMemorySizeByDimensions(dimensions, memIncrementAmount); + expected.get(new HashSet<>(dimensions)).memorySize.inc(memIncrementAmount); + } + + int numEntryIncrements = rand.nextInt(9) + 1; + for (int k = 0; k < numEntryIncrements; k++) { + stats.incrementEntriesByDimensions(dimensions); + expected.get(new HashSet<>(dimensions)).entries.inc(); + } + + int numEntryDecrements = rand.nextInt(numEntryIncrements); + for (int k = 0; k < numEntryDecrements; k++) { + stats.decrementEntriesByDimensions(dimensions); + expected.get(new HashSet<>(dimensions)).entries.dec(); + } + } + } + return expected; + } + + private List getRandomDimList(List dimensionNames, Map> usedDimensionValues, boolean pickValueForAllDims, Random rand) { + List result = new ArrayList<>(); + for (String dimName : dimensionNames) { + if (pickValueForAllDims || rand.nextBoolean()) { // if pickValueForAllDims, always pick a value for each dimension, otherwise do so 50% of the time + int index = between(0, usedDimensionValues.get(dimName).size() - 1); + result.add(new CacheStatsDimension(dimName, usedDimensionValues.get(dimName).get(index))); + } + } + return result; + } +} From b4c83e72471b3796ff64662d6df254dfd0145804 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Tue, 27 Feb 2024 13:47:48 -0800 Subject: [PATCH 25/32] Removed SingleDimensionCacheStats Signed-off-by: Peter Alfonsi --- .../tier/TieredSpilloverCacheTests.java | 10 - .../cache/store/disk/EhcacheDiskCache.java | 1 - .../stats/SingleDimensionCacheStats.java | 283 ------------------ .../cache/store/OpenSearchOnHeapCache.java | 7 +- .../stats/SingleDimensionCacheStatsTests.java | 222 -------------- 5 files changed, 1 insertion(+), 522 deletions(-) delete mode 100644 server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java delete mode 100644 
server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java diff --git a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java index f96bf489d5801..c1f5abe5a2fe1 100644 --- a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java +++ b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java @@ -12,10 +12,8 @@ import org.opensearch.common.cache.ICache; import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.LoadAwareCacheLoader; -import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.cache.stats.CacheStats; -import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.OpenSearchOnHeapCache; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.config.CacheConfig; @@ -26,16 +24,8 @@ import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.test.OpenSearchTestCase; -import java.util.ArrayList; -import java.util.List; import java.util.Map; -import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Phaser; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; import static org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings.MAXIMUM_SIZE_IN_BYTES_KEY; diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 372a2993ecb18..4a610aae324e8 100644 --- 
a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -26,7 +26,6 @@ import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.stats.CacheStatsDimension; import org.opensearch.common.cache.stats.MultiDimensionCacheStats; -import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.serializer.ICacheKeySerializer; import org.opensearch.common.cache.serializer.Serializer; diff --git a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java deleted file mode 100644 index 63f8ab27e43c2..0000000000000 --- a/server/src/main/java/org/opensearch/common/cache/stats/SingleDimensionCacheStats.java +++ /dev/null @@ -1,283 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.common.cache.stats; - -import org.opensearch.common.metrics.CounterMetric; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -/** - * A CacheStats implementation for caches that aggregate over a single dimension, as well as holding a tier dimension. - * For example, caches in the IndicesRequestCache only aggregate over ShardId value. 
- */ -public class SingleDimensionCacheStats implements CacheStats { - // Maintain a counter metric for each shard id (dimension values) - private final ConcurrentMap hitsMap; - private final ConcurrentMap missesMap; - private final ConcurrentMap evictionsMap; - private final ConcurrentMap memorySizeMap; - private final ConcurrentMap entriesMap; - - // Also maintain a single total counter metric, to avoid having to sum over many values for shards - private final CounterMetric totalHits; - private final CounterMetric totalMisses; - private final CounterMetric totalEvictions; - private final CounterMetric totalMemorySize; - private final CounterMetric totalEntries; - - // The allowed dimension name. This stats only allows a single dimension name. Package-private for testing. - final String allowedDimensionName; - // The value of the tier dimension for entries in this Stats object. Package-private for testing. - final String tierDimensionValue; - - public SingleDimensionCacheStats(String allowedDimensionName, String tierDimensionValue) { - this.hitsMap = new ConcurrentHashMap<>(); - this.missesMap = new ConcurrentHashMap<>(); - this.evictionsMap = new ConcurrentHashMap<>(); - this.memorySizeMap = new ConcurrentHashMap<>(); - this.entriesMap = new ConcurrentHashMap<>(); - - this.totalHits = new CounterMetric(); - this.totalMisses = new CounterMetric(); - this.totalEvictions = new CounterMetric(); - this.totalMemorySize = new CounterMetric(); - this.totalEntries = new CounterMetric(); - - this.allowedDimensionName = allowedDimensionName; - this.tierDimensionValue = tierDimensionValue; - } - - public SingleDimensionCacheStats(StreamInput in) throws IOException { - this.hitsMap = convertLongMapToCounterMetric(in.readMap(StreamInput::readString, StreamInput::readVLong)); - this.missesMap = convertLongMapToCounterMetric(in.readMap(StreamInput::readString, StreamInput::readVLong)); - this.evictionsMap = convertLongMapToCounterMetric(in.readMap(StreamInput::readString, 
StreamInput::readVLong)); - this.memorySizeMap = convertLongMapToCounterMetric(in.readMap(StreamInput::readString, StreamInput::readVLong)); - this.entriesMap = convertLongMapToCounterMetric(in.readMap(StreamInput::readString, StreamInput::readVLong)); - - this.totalHits = new CounterMetric(); - totalHits.inc(in.readVLong()); - this.totalMisses = new CounterMetric(); - totalMisses.inc(in.readVLong()); - this.totalEvictions = new CounterMetric(); - totalEvictions.inc(in.readVLong()); - this.totalMemorySize = new CounterMetric(); - totalMemorySize.inc(in.readVLong()); - this.totalEntries = new CounterMetric(); - totalEntries.inc(in.readVLong()); - - this.allowedDimensionName = in.readString(); - this.tierDimensionValue = in.readString(); - } - - @Override - public CacheStatsResponse getTotalStats() { - return new CacheStatsResponse(getTotalHits(), getTotalMisses(), getTotalEvictions(), getTotalMemorySize(), getTotalEntries()); - } - - @Override - public CacheStatsResponse getStatsByDimensions(List dimensions) { - return new CacheStatsResponse( - getHitsByDimensions(dimensions), - getMissesByDimensions(dimensions), - getEvictionsByDimensions(dimensions), - getMemorySizeByDimensions(dimensions), - getEntriesByDimensions(dimensions) - ); - } - - @Override - public long getTotalHits() { - return this.totalHits.count(); - } - - @Override - public long getTotalMisses() { - return this.totalMisses.count(); - } - - @Override - public long getTotalEvictions() { - return this.totalEvictions.count(); - } - - @Override - public long getTotalMemorySize() { - return this.totalMemorySize.count(); - } - - @Override - public long getTotalEntries() { - return this.totalEntries.count(); - } - - private long internalGetByDimension(List dimensions, Map metricsMap, CounterMetric totalMetric) { - CacheStatsDimension tierDimension = getTierDimensionIfPresent(dimensions); - if (tierDimension != null) { - // This get request includes a tier dimension. 
Return values only if the tier dimension value - // matches the one for this stats object, otherwise return 0 - assert dimensions.size() == 1 || dimensions.size() == 2; // There can be at most one non-tier dimension value - if (tierDimension.dimensionValue.equals(tierDimensionValue)) { - // The list passed in may not be mutable; create a mutable copy to remove the tier dimension - ArrayList modifiedDimensions = new ArrayList<>(dimensions); - modifiedDimensions.remove(tierDimension); - - if (modifiedDimensions.size() == 1){ - return internalGetHelper(modifiedDimensions, metricsMap); - } else { - return totalMetric.count(); - } - - } else { - // Return 0 for incorrect tier value - return 0; - } - } else { - // This get request doesn't include a tier dimension. Return the appropriate values. - return internalGetHelper(dimensions, metricsMap); - } - } - - private long internalGetHelper(List dimensions, Map metricsMap) { - assert dimensions.size() == 1; - CounterMetric counter = metricsMap.get(dimensions.get(0).dimensionValue); - if (counter == null) { - return 0; - } - return counter.count(); - } - - /** - * Returns the dimension that represents a tier value, if one is present. Otherwise return null. 
- */ - private CacheStatsDimension getTierDimensionIfPresent(List dimensions) { - for (CacheStatsDimension dim : dimensions) { - if (dim.dimensionName.equals(CacheStatsDimension.TIER_DIMENSION_NAME)) { - return dim; - } - } - return null; - } - - @Override - public long getHitsByDimensions(List dimensions) { - return internalGetByDimension(dimensions, hitsMap, totalHits); - } - - @Override - public long getMissesByDimensions(List dimensions) { - return internalGetByDimension(dimensions, missesMap, totalMisses); - } - - @Override - public long getEvictionsByDimensions(List dimensions) { - return internalGetByDimension(dimensions, evictionsMap, totalEvictions); - } - - @Override - public long getMemorySizeByDimensions(List dimensions) { - return internalGetByDimension(dimensions, memorySizeMap, totalMemorySize); - } - - @Override - public long getEntriesByDimensions(List dimensions) { - return internalGetByDimension(dimensions, entriesMap, totalEntries); - } - - private boolean checkDimensionList(List dimensions) { - return dimensions.size() == 1 && allowedDimensionName.equals(dimensions.get(0).dimensionName); - } - private void internalIncrement(List dimensions, Map metricMap, CounterMetric totalMetric, long incrementAmount) { - if (checkDimensionList(dimensions)) { - String dimensionValue = dimensions.get(0).dimensionValue; - totalMetric.inc(incrementAmount); - CounterMetric counter = metricMap.get(dimensionValue); - if (counter == null) { - counter = new CounterMetric(); - metricMap.put(dimensionValue, counter); - } - counter.inc(incrementAmount); - } - } - - @Override - public void incrementHitsByDimensions(List dimensions) { - internalIncrement(dimensions, hitsMap, totalHits, 1); - } - - @Override - public void incrementMissesByDimensions(List dimensions) { - internalIncrement(dimensions, missesMap, totalMisses, 1); - } - - @Override - public void incrementEvictionsByDimensions(List dimensions) { - internalIncrement(dimensions, evictionsMap, totalEvictions, 1); 
- } - - @Override - public void incrementMemorySizeByDimensions(List dimensions, long amountBytes) { - internalIncrement(dimensions, memorySizeMap, totalMemorySize, amountBytes); - } - - @Override - public void incrementEntriesByDimensions(List dimensions) { - internalIncrement(dimensions, entriesMap, totalEntries, 1); - } - - @Override - public void decrementEntriesByDimensions(List dimensions) { - internalIncrement(dimensions, entriesMap, totalEntries, -1); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeMap(convertCounterMapToLong(hitsMap), StreamOutput::writeString, StreamOutput::writeVLong); - out.writeMap(convertCounterMapToLong(missesMap), StreamOutput::writeString, StreamOutput::writeVLong); - out.writeMap(convertCounterMapToLong(evictionsMap), StreamOutput::writeString, StreamOutput::writeVLong); - out.writeMap(convertCounterMapToLong(memorySizeMap), StreamOutput::writeString, StreamOutput::writeVLong); - out.writeMap(convertCounterMapToLong(entriesMap), StreamOutput::writeString, StreamOutput::writeVLong); - - out.writeVLong(totalHits.count()); - out.writeVLong(totalMisses.count()); - out.writeVLong(totalEvictions.count()); - out.writeVLong(totalMemorySize.count()); - out.writeVLong(totalEntries.count()); - - out.writeString(allowedDimensionName); - out.writeString(tierDimensionValue); - } - - // For converting to StreamOutput/StreamInput, write maps of longs rather than CounterMetrics which don't support writing - private Map convertCounterMapToLong(Map inputMap) { - Map result = new HashMap<>(); - for (String key : inputMap.keySet()) { - result.put(key, inputMap.get(key).count()); - } - return result; - } - - private ConcurrentMap convertLongMapToCounterMetric(Map inputMap) { - ConcurrentMap result = new ConcurrentHashMap<>(); - for (String key: inputMap.keySet()) { - CounterMetric counter = new CounterMetric(); - counter.inc(inputMap.get(key)); - result.put(key, counter); - } - return result; - } -} diff --git 
a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index ff3274409e110..172c3f2110b67 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -15,24 +15,19 @@ import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; -import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.stats.CacheStatsDimension; import org.opensearch.common.cache.stats.MultiDimensionCacheStats; -import org.opensearch.common.cache.stats.SingleDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.config.CacheConfig; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.TimeValue; import java.util.List; import java.util.Objects; -import java.util.function.ToLongBiFunction; -import org.opensearch.common.cache.store.builders.ICacheBuilder; + import org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings; import org.opensearch.common.settings.Setting; -import org.opensearch.common.settings.Settings; import org.opensearch.core.common.unit.ByteSizeValue; import java.util.Map; diff --git a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java deleted file mode 100644 index 1c94ce9322622..0000000000000 --- a/server/src/test/java/org/opensearch/common/cache/stats/SingleDimensionCacheStatsTests.java +++ /dev/null @@ -1,222 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The 
OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.common.cache.stats; - -import org.opensearch.common.Randomness; -import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.core.common.io.stream.BytesStreamInput; -import org.opensearch.test.OpenSearchTestCase; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Random; - -public class SingleDimensionCacheStatsTests extends OpenSearchTestCase { - private final String dimensionName = "shardId"; - private final String tierName = "test_tier"; - public void testAddAndGet() throws Exception { - StatsAndExpectedResults statsAndExpectedResults = getPopulatedStats(tierName); - SingleDimensionCacheStats stats = statsAndExpectedResults.stats; - - checkShardResults(statsAndExpectedResults); - checkTotalResults(statsAndExpectedResults); - - // Check values returned for a nonexistent dimension value or name return 0 - assertEquals(0, stats.getHitsByDimensions(List.of(new CacheStatsDimension(dimensionName, "nonexistent")))); - assertEquals(0, stats.getHitsByDimensions(List.of(new CacheStatsDimension("nonexistentName", "nonexistentValue")))); - - // Check sending too many values causes an assertion error - assertThrows(AssertionError.class, () -> stats.getHitsByDimensions(List.of(getDim(0), new CacheStatsDimension("test", "value")))); - } - - public void testTierFiltering() throws Exception { - StatsAndExpectedResults statsAndExpectedResults = getPopulatedStats(tierName); - SingleDimensionCacheStats stats = statsAndExpectedResults.stats; - - // Values should be returned if the tier dimension value matches the one passed to SingleDimensionCacheStats. Otherwise we should get 0. 
- CacheStatsDimension matchingTierDim = new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, tierName); - CacheStatsDimension nonMatchingTierDim = new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, "another_tier"); - - assertEquals(stats.getTotalHits(), stats.getHitsByDimensions(List.of(matchingTierDim))); - assertEquals(0, stats.getHitsByDimensions(List.of(nonMatchingTierDim))); - for (int i = 0; i < statsAndExpectedResults.numShardIds; i++) { - assertEquals(stats.getHitsByDimensions(List.of(getDim(i))), stats.getHitsByDimensions(List.of(getDim(i), matchingTierDim))); - assertEquals(stats.getHitsByDimensions(List.of(getDim(i))), stats.getHitsByDimensions(List.of(matchingTierDim, getDim(i)))); - assertEquals(0, stats.getHitsByDimensions(List.of(getDim(i), nonMatchingTierDim))); - assertEquals(0, stats.getHitsByDimensions(List.of(nonMatchingTierDim, getDim(i)))); - - } - // Check sending too many values causes an assertion error - assertThrows(AssertionError.class, () -> stats.getHitsByDimensions(List.of(getDim(0), matchingTierDim, new CacheStatsDimension("test", "value")))); - } - - public void testSerialization() throws Exception { - StatsAndExpectedResults statsAndExpectedResults = getPopulatedStats(tierName); - SingleDimensionCacheStats stats = statsAndExpectedResults.stats; - Map> expectedResults = statsAndExpectedResults.expectedShardResults; - - BytesStreamOutput os = new BytesStreamOutput(); - stats.writeTo(os); - BytesStreamInput is = new BytesStreamInput(BytesReference.toBytes(os.bytes())); - SingleDimensionCacheStats deserialized = new SingleDimensionCacheStats(is); - - StatsAndExpectedResults deserializedStatsAndExpectedResults = new StatsAndExpectedResults(deserialized, expectedResults, statsAndExpectedResults.numShardIds); - checkShardResults(deserializedStatsAndExpectedResults); - checkTotalResults(deserializedStatsAndExpectedResults); - assertEquals(deserialized.allowedDimensionName, stats.allowedDimensionName); - 
assertEquals(deserialized.tierDimensionValue, stats.tierDimensionValue); - } - - private CacheStatsDimension getDim(int i) { - return new CacheStatsDimension(dimensionName, String.valueOf(i)); - } - - private List getDimList(int i) { - ArrayList result = new ArrayList<>(); - result.add(getDim(i)); - return result; - } - - private long sumMap(Map inputMap) { - long result = 0; - for (String key : inputMap.keySet()) { - result += inputMap.get(key); - } - return result; - } - - private StatsAndExpectedResults getPopulatedStats(String tierName) { - SingleDimensionCacheStats stats = new SingleDimensionCacheStats(dimensionName, tierName); - - int numShardIds = 10; - Map expectedHits = new HashMap<>(); - Map expectedMisses = new HashMap<>(); - Map expectedEvictions = new HashMap<>(); - Map expectedMemorySize = new HashMap<>(); - Map expectedEntries = new HashMap<>(); - - Random rand = Randomness.get(); - - // For each shard id value, increment metrics some random number of times (possibly 0) - for (int shardId = 0; shardId < numShardIds; shardId++) { - - String shardIdString = String.valueOf(shardId); - List dimensions = getDimList(shardId); - - for (Map map : new Map[]{expectedHits, expectedMisses, expectedEvictions, expectedMemorySize, expectedEntries}) { - map.put(shardIdString, 0L); - } - - int numHitIncrements = rand.nextInt(10); - for (int i = 0; i < numHitIncrements; i++) { - stats.incrementHitsByDimensions(dimensions); - expectedHits.put(shardIdString, expectedHits.get(shardIdString) + 1); - } - - int numMissIncrements = rand.nextInt(10); - for (int i = 0; i < numMissIncrements; i++) { - stats.incrementMissesByDimensions(dimensions); - expectedMisses.put(shardIdString, expectedMisses.get(shardIdString) + 1); - } - - int numEvictionIncrements = rand.nextInt(10); - for (int i = 0; i < numEvictionIncrements; i++) { - stats.incrementEvictionsByDimensions(dimensions); - expectedEvictions.put(shardIdString, expectedEvictions.get(shardIdString) + 1); - } - - int 
numMemorySizeIncrements = rand.nextInt(10); - for (int i = 0; i < numMemorySizeIncrements; i++) { - long memIncrementAmount = (long) rand.nextInt(5000); - stats.incrementMemorySizeByDimensions(dimensions, memIncrementAmount); - expectedMemorySize.put(shardIdString, expectedMemorySize.get(shardIdString) + memIncrementAmount); - } - - int numEntryIncrements = rand.nextInt(9) + 1; - for (int i = 0; i < numEntryIncrements; i++) { - stats.incrementEntriesByDimensions(dimensions); - expectedEntries.put(shardIdString, expectedEntries.get(shardIdString) + 1); - } - - int numEntryDecrements = rand.nextInt(numEntryIncrements); - for (int i = 0; i < numEntryDecrements; i++) { - stats.decrementEntriesByDimensions(dimensions); - expectedEntries.put(shardIdString, expectedEntries.get(shardIdString) - 1); - } - } - Map> expectedShardResults = new HashMap<>(); - expectedShardResults.put("hits", expectedHits); - expectedShardResults.put("misses", expectedMisses); - expectedShardResults.put("evictions", expectedEvictions); - expectedShardResults.put("memory_size", expectedMemorySize); - expectedShardResults.put("entries", expectedEntries); - return new StatsAndExpectedResults(stats, expectedShardResults, numShardIds); - } - - private void checkShardResults(StatsAndExpectedResults statsAndExpectedResults) { - // check the resulting values on dimension level are what we expect - Map> expectedResults = statsAndExpectedResults.expectedShardResults; - SingleDimensionCacheStats stats = statsAndExpectedResults.stats; - for (int shardId = 0; shardId < statsAndExpectedResults.numShardIds; shardId++) { - String shardIdString = String.valueOf(shardId); - CacheStatsDimension dimension = getDim(shardId); - - // Check the individual metric getters - assertEquals((long) expectedResults.get("hits").get(shardIdString), stats.getHitsByDimensions(List.of(dimension))); - assertEquals((long) expectedResults.get("misses").get(shardIdString), stats.getMissesByDimensions(List.of(dimension))); - 
assertEquals((long) expectedResults.get("evictions").get(shardIdString), stats.getEvictionsByDimensions(List.of(dimension))); - assertEquals((long) expectedResults.get("memory_size").get(shardIdString), stats.getMemorySizeByDimensions(List.of(dimension))); - assertEquals((long) expectedResults.get("entries").get(shardIdString), stats.getEntriesByDimensions(List.of(dimension))); - - // Check the total metric getter - CacheStatsResponse response = stats.getStatsByDimensions(List.of(dimension)); - assertEquals((long) expectedResults.get("hits").get(shardIdString), response.hits); - assertEquals((long) expectedResults.get("misses").get(shardIdString), response.misses); - assertEquals((long) expectedResults.get("evictions").get(shardIdString), response.evictions); - assertEquals((long) expectedResults.get("memory_size").get(shardIdString), response.memorySize); - assertEquals((long) expectedResults.get("entries").get(shardIdString), response.entries); - } - } - - private void checkTotalResults(StatsAndExpectedResults statsAndExpectedResults) { - // check resulting total values are what we expect - Map> expectedResults = statsAndExpectedResults.expectedShardResults; - SingleDimensionCacheStats stats = statsAndExpectedResults.stats; - - // Check the individual metric getters - assertEquals(sumMap(expectedResults.get("hits")), stats.getTotalHits()); - assertEquals(sumMap(expectedResults.get("misses")), stats.getTotalMisses()); - assertEquals(sumMap(expectedResults.get("evictions")), stats.getTotalEvictions()); - assertEquals(sumMap(expectedResults.get("memory_size")), stats.getTotalMemorySize()); - assertEquals(sumMap(expectedResults.get("entries")), stats.getTotalEntries()); - - // Check the total metric getter - CacheStatsResponse totalResponse = stats.getTotalStats(); - assertEquals(sumMap(expectedResults.get("hits")), totalResponse.hits); - assertEquals(sumMap(expectedResults.get("misses")), totalResponse.misses); - 
assertEquals(sumMap(expectedResults.get("evictions")), totalResponse.evictions); - assertEquals(sumMap(expectedResults.get("memory_size")), totalResponse.memorySize); - assertEquals(sumMap(expectedResults.get("entries")), totalResponse.entries); - } - - // Convenience class to allow reusing setup code across tests - private class StatsAndExpectedResults { - private final SingleDimensionCacheStats stats; - private final Map> expectedShardResults; - private final int numShardIds; - private StatsAndExpectedResults(SingleDimensionCacheStats stats, Map> expectedShardResults, int numShardIds) { - this.stats = stats; - this.expectedShardResults = expectedShardResults; - this.numShardIds = numShardIds; - } - } -} From d579c51ffd5997809db29c867a47a5772bd7ff39 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Tue, 27 Feb 2024 13:50:41 -0800 Subject: [PATCH 26/32] Adds IRC key serializer Signed-off-by: Peter Alfonsi --- .../indices/IRCKeyWriteableSerializer.java | 63 +++++++++++++++++++ .../IRCKeyWriteableSerializerTests.java | 54 ++++++++++++++++ 2 files changed, 117 insertions(+) create mode 100644 server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java create mode 100644 server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java diff --git a/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java b/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java new file mode 100644 index 0000000000000..56d16e84f3f42 --- /dev/null +++ b/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java @@ -0,0 +1,63 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.indices; + +import org.opensearch.OpenSearchException; +import org.opensearch.common.cache.serializer.Serializer; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.BytesStreamInput; + +import java.io.IOException; +import java.util.Arrays; + +/** + * This class serializes the IndicesRequestCache.Key using its writeTo method. + */ +public class IRCKeyWriteableSerializer implements Serializer { + + + public IRCKeyWriteableSerializer() { + } + + @Override + public byte[] serialize(IndicesRequestCache.Key object) { + try { + BytesStreamOutput os = new BytesStreamOutput(); + object.writeTo(os); + return BytesReference.toBytes(os.bytes()); + } catch (IOException e) { + throw new OpenSearchException(e); + } + } + + @Override + public IndicesRequestCache.Key deserialize(byte[] bytes) { + if (bytes == null) { + return null; + } + try { + BytesStreamInput is = new BytesStreamInput(bytes, 0, bytes.length); + return new IndicesRequestCache.Key(is); + } catch (IOException e) { + throw new OpenSearchException(e); + } + } + + @Override + public boolean equals(IndicesRequestCache.Key object, byte[] bytes) { + // Deserialization is much slower than serialization for keys of order 1 KB, + // while time to serialize is fairly constant (per byte) + if (bytes.length < 5000) { + return Arrays.equals(serialize(object), bytes); + } else { + return object.equals(deserialize(bytes)); + } + } +} diff --git a/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java b/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java new file mode 100644 index 0000000000000..a4a97b5c8470f --- /dev/null +++ b/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java @@ -0,0 +1,54 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this 
file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ +package org.opensearch.indices; + +import org.opensearch.common.Randomness; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.index.IndexService; +import org.opensearch.index.shard.IndexShard; +import org.opensearch.test.OpenSearchSingleNodeTestCase; + +import java.util.Random; +import java.util.UUID; + +public class IRCKeyWriteableSerializerTests extends OpenSearchSingleNodeTestCase { + + public void testSerializer() throws Exception { + IndexService indexService = createIndex("test"); + IndexShard indexShard = indexService.getShardOrNull(0); + IRCKeyWriteableSerializer ser = new IRCKeyWriteableSerializer(); + + int NUM_KEYS = 1000; + int[] valueLengths = new int[] { 1000, 6000 }; // test both branches in equals() + Random rand = Randomness.get(); + for (int valueLength : valueLengths) { + for (int i = 0; i < NUM_KEYS; i++) { + IndicesRequestCache.Key key = getRandomIRCKey(valueLength, rand, indexShard.shardId()); + byte[] serialized = ser.serialize(key); + assertTrue(ser.equals(key, serialized)); + IndicesRequestCache.Key deserialized = ser.deserialize(serialized); + assertTrue(key.equals(deserialized)); + } + } + } + + private IndicesRequestCache.Key getRandomIRCKey( + int valueLength, + Random random, + ShardId shard + ) { + byte[] value = new byte[valueLength]; + for (int i = 0; i < valueLength; i++) { + value[i] = (byte) (random.nextInt(126 - 32) + 32); + } + BytesReference keyValue = new BytesArray(value); + return new IndicesRequestCache.Key(shard, keyValue, UUID.randomUUID().toString()); // same UUID source as used in real key + } +} From c34c218902c7aa819b5cf0d53395a4338f16d93c Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Tue, 27 Feb 2024 13:54:09 -0800 Subject: [PATCH 27/32] Addressed Sagar's other comment Signed-off-by: Peter 
Alfonsi --- .../org/opensearch/cache/store/disk/EhcacheDiskCache.java | 4 +++- .../opensearch/cache/store/disk/EhCacheDiskCacheTests.java | 4 ++-- .../opensearch/common/cache/stats/CacheStatsDimension.java | 2 -- .../opensearch/common/cache/store/OpenSearchOnHeapCache.java | 3 ++- 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 4a610aae324e8..e769f0e0d7ca1 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -115,6 +115,8 @@ public class EhcacheDiskCache implements ICache { private final Serializer keySerializer; private final Serializer valueSerializer; + public final static String TIER_DIMENSION_VALUE = "disk"; + /** * Used in computeIfAbsent to synchronize loading of a given key. This is needed as ehcache doesn't provide a * computeIfAbsent method. 
@@ -154,7 +156,7 @@ private EhcacheDiskCache(Builder builder) { this.valueSerializer); this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); List dimensionNames = Objects.requireNonNull(builder.dimensionNames, "Dimension names can't be null"); - this.stats = new MultiDimensionCacheStats(dimensionNames, CacheStatsDimension.TIER_DIMENSION_VALUE_DISK); + this.stats = new MultiDimensionCacheStats(dimensionNames, TIER_DIMENSION_VALUE); } private Cache buildCache(Duration expireAfterAccess, Builder builder) { diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 337fe75c67131..a25cde40396a6 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -632,8 +632,8 @@ public void testGetStatsByTierName() throws Exception { for (int i = 0; i < randomKeys; i++) { ehcacheTest.put(getICacheKey(UUID.randomUUID().toString()), UUID.randomUUID().toString()); } - assertEquals(randomKeys, ehcacheTest.stats().getEntriesByDimensions(List.of(new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, CacheStatsDimension.TIER_DIMENSION_VALUE_DISK)))); - assertEquals(0, ehcacheTest.stats().getEntriesByDimensions(List.of(new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, CacheStatsDimension.TIER_DIMENSION_VALUE_ON_HEAP)))); + assertEquals(randomKeys, ehcacheTest.stats().getEntriesByDimensions(List.of(new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, EhcacheDiskCache.TIER_DIMENSION_VALUE)))); + assertEquals(0, ehcacheTest.stats().getEntriesByDimensions(List.of(new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, "other_tier_value")))); ehcacheTest.close(); } diff --git 
a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java index 4abdbff5d5a4a..5ba83122f7fd2 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java @@ -18,8 +18,6 @@ public class CacheStatsDimension implements Writeable { // Values for tier dimensions, that are reused across CacheStats implementations public static final String TIER_DIMENSION_NAME = "tier"; - public static final String TIER_DIMENSION_VALUE_ON_HEAP = "on_heap"; - public static final String TIER_DIMENSION_VALUE_DISK = "disk"; public final String dimensionName; public final String dimensionValue; public CacheStatsDimension(String dimensionName, String dimensionValue) { diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index 172c3f2110b67..fe003be3de5d4 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -45,6 +45,7 @@ public class OpenSearchOnHeapCache implements ICache, RemovalListene private final Cache, V> cache; private final CacheStats stats; private final RemovalListener, V> removalListener; + public static final String TIER_DIMENSION_VALUE = "on_heap"; public OpenSearchOnHeapCache(Builder builder) { CacheBuilder, V> cacheBuilder = CacheBuilder., V>builder() @@ -56,7 +57,7 @@ public OpenSearchOnHeapCache(Builder builder) { } cache = cacheBuilder.build(); List dimensionNames = Objects.requireNonNull(builder.dimensionNames, "Dimension names can't be null"); - this.stats = new MultiDimensionCacheStats(dimensionNames, CacheStatsDimension.TIER_DIMENSION_VALUE_ON_HEAP); + this.stats = new 
MultiDimensionCacheStats(dimensionNames, TIER_DIMENSION_VALUE); this.removalListener = builder.getRemovalListener(); } From a61f033201c694614a9471be97c4fa52eeb829bb Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Thu, 29 Feb 2024 13:19:21 -0800 Subject: [PATCH 28/32] Fixed on heap stats integration, added tests Signed-off-by: Peter Alfonsi --- .../org/opensearch/common/cache/Cache.java | 4 + .../cache/store/OpenSearchOnHeapCache.java | 19 ++- .../store/OpenSearchOnHeapCacheTests.java | 135 ++++++++++++++++++ 3 files changed, 154 insertions(+), 4 deletions(-) create mode 100644 server/src/test/java/org/opensearch/common/cache/store/OpenSearchOnHeapCacheTests.java diff --git a/server/src/main/java/org/opensearch/common/cache/Cache.java b/server/src/main/java/org/opensearch/common/cache/Cache.java index d8aa4e93735e6..ad8a1f01bb0fe 100644 --- a/server/src/main/java/org/opensearch/common/cache/Cache.java +++ b/server/src/main/java/org/opensearch/common/cache/Cache.java @@ -899,4 +899,8 @@ private void relinkAtHead(Entry entry) { private CacheSegment getCacheSegment(K key) { return segments[key.hashCode() & 0xff]; } + + public ToLongBiFunction getWeigher() { + return weigher; + } } diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index fe003be3de5d4..758ba360eb27c 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -15,9 +15,9 @@ import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; import org.opensearch.common.cache.ICacheKey; -import org.opensearch.common.cache.stats.CacheStatsDimension; import 
org.opensearch.common.cache.stats.MultiDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.config.CacheConfig; @@ -31,6 +31,7 @@ import org.opensearch.core.common.unit.ByteSizeValue; import java.util.Map; +import java.util.function.ToLongBiFunction; import static org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings.MAXIMUM_SIZE_IN_BYTES_KEY; @@ -43,8 +44,9 @@ */ public class OpenSearchOnHeapCache implements ICache, RemovalListener, V> { private final Cache, V> cache; - private final CacheStats stats; + private CacheStats stats; private final RemovalListener, V> removalListener; + private final List dimensionNames; public static final String TIER_DIMENSION_VALUE = "on_heap"; public OpenSearchOnHeapCache(Builder builder) { @@ -56,7 +58,7 @@ public OpenSearchOnHeapCache(Builder builder) { cacheBuilder.setExpireAfterAccess(builder.getExpireAfterAcess()); } cache = cacheBuilder.build(); - List dimensionNames = Objects.requireNonNull(builder.dimensionNames, "Dimension names can't be null"); + this.dimensionNames = Objects.requireNonNull(builder.dimensionNames, "Dimension names can't be null"); this.stats = new MultiDimensionCacheStats(dimensionNames, TIER_DIMENSION_VALUE); this.removalListener = builder.getRemovalListener(); } @@ -76,6 +78,7 @@ public V get(ICacheKey key) { public void put(ICacheKey key, V value) { cache.put(key, value); stats.incrementEntriesByDimensions(key.dimensions); + stats.incrementMemorySizeByDimensions(key.dimensions, cache.getWeigher().applyAsLong(key, value)); } @Override @@ -86,6 +89,7 @@ public V computeIfAbsent(ICacheKey key, LoadAwareCacheLoader, V> } else { stats.incrementMissesByDimensions(key.dimensions); stats.incrementEntriesByDimensions(key.dimensions); + stats.incrementMemorySizeByDimensions(key.dimensions, cache.getWeigher().applyAsLong(key, value)); } return value; } @@ -93,12 +97,12 @@ public V computeIfAbsent(ICacheKey key, 
LoadAwareCacheLoader, V> @Override public void invalidate(ICacheKey key) { cache.invalidate(key); - stats.decrementEntriesByDimensions(key.dimensions); } @Override public void invalidateAll() { cache.invalidateAll(); + stats = new MultiDimensionCacheStats(dimensionNames, TIER_DIMENSION_VALUE); } @Override @@ -127,6 +131,13 @@ public CacheStats stats() { @Override public void onRemoval(RemovalNotification, V> notification) { removalListener.onRemoval(notification); + stats.decrementEntriesByDimensions(notification.getKey().dimensions); + stats.incrementMemorySizeByDimensions(notification.getKey().dimensions, -cache.getWeigher().applyAsLong(notification.getKey(), notification.getValue())); + + if (RemovalReason.EVICTED.equals(notification.getRemovalReason()) + || RemovalReason.CAPACITY.equals(notification.getRemovalReason())) { + stats.incrementEvictionsByDimensions(notification.getKey().dimensions); + } } /** diff --git a/server/src/test/java/org/opensearch/common/cache/store/OpenSearchOnHeapCacheTests.java b/server/src/test/java/org/opensearch/common/cache/store/OpenSearchOnHeapCacheTests.java new file mode 100644 index 0000000000000..22d8813aede19 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/store/OpenSearchOnHeapCacheTests.java @@ -0,0 +1,135 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.store; + +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.ICacheKey; +import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.stats.CacheStatsDimension; +import org.opensearch.common.cache.store.config.CacheConfig; +import org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings; +import org.opensearch.common.metrics.CounterMetric; +import org.opensearch.common.settings.Settings; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + +import static org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings.MAXIMUM_SIZE_IN_BYTES_KEY; + +public class OpenSearchOnHeapCacheTests extends OpenSearchTestCase { + private final static long keyValueSize = 50; + private final static List dimensionNames = List.of("dim1", "dim2"); + public void testStats() throws Exception { + MockRemovalListener listener = new MockRemovalListener<>(); + int maxKeys = between(10, 50); + int numEvicted = between(10, 20); + OpenSearchOnHeapCache cache = getCache(maxKeys, listener); + + List> keysAdded = new ArrayList<>(); + int numAdded = maxKeys + numEvicted; + for (int i = 0; i < numAdded; i++) { + ICacheKey key = getICacheKey(UUID.randomUUID().toString()); + keysAdded.add(key); + cache.computeIfAbsent(key, getLoadAwareCacheLoader()); + + assertEquals(i + 1, cache.stats().getTotalMisses()); + assertEquals(0, cache.stats().getTotalHits()); + assertEquals(Math.min(maxKeys, i + 1), cache.stats().getTotalEntries()); + assertEquals(Math.min(maxKeys, i + 1) * keyValueSize, cache.stats().getTotalMemorySize()); + assertEquals(Math.max(0, i + 1 - maxKeys), cache.stats().getTotalEvictions()); + } + // do gets from the last part of the 
list, which should be hits + for (int i = numAdded - maxKeys; i < numAdded; i++) { + cache.computeIfAbsent(keysAdded.get(i), getLoadAwareCacheLoader()); + int numHits = i + 1 - (numAdded - maxKeys); + + assertEquals(numAdded, cache.stats().getTotalMisses()); + assertEquals(numHits, cache.stats().getTotalHits()); + assertEquals(maxKeys, cache.stats().getTotalEntries()); + assertEquals(maxKeys * keyValueSize, cache.stats().getTotalMemorySize()); + assertEquals(numEvicted, cache.stats().getTotalEvictions()); + } + + // invalidate keys + for (int i = numAdded - maxKeys; i < numAdded; i++) { + cache.invalidate(keysAdded.get(i)); + int numInvalidated = i + 1 - (numAdded - maxKeys); + + assertEquals(numAdded, cache.stats().getTotalMisses()); + assertEquals(maxKeys, cache.stats().getTotalHits()); + assertEquals(maxKeys - numInvalidated, cache.stats().getTotalEntries()); + assertEquals((maxKeys - numInvalidated) * keyValueSize, cache.stats().getTotalMemorySize()); + assertEquals(numEvicted, cache.stats().getTotalEvictions()); + } + } + + private OpenSearchOnHeapCache getCache(int maxSizeKeys, MockRemovalListener listener) { + ICache.Factory onHeapCacheFactory = new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(); + Settings settings = Settings.builder() + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + maxSizeKeys * keyValueSize + "b" + ) + .build(); + + CacheConfig cacheConfig = new CacheConfig.Builder() + .setKeyType(String.class) + .setValueType(String.class) + .setWeigher((k, v) -> keyValueSize) + .setRemovalListener(listener) + .setSettings(settings) + .setDimensionNames(dimensionNames) + .build(); + return (OpenSearchOnHeapCache) onHeapCacheFactory.create(cacheConfig, CacheType.INDICES_REQUEST_CACHE, null); + } + + private static class MockRemovalListener implements RemovalListener, V> { + CounterMetric numRemovals; + MockRemovalListener() { + numRemovals = new 
CounterMetric(); + } + + @Override + public void onRemoval(RemovalNotification, V> notification) { + numRemovals.inc(); + } + } + + private ICacheKey getICacheKey(String key) { + List dims = new ArrayList<>(); + for (String dimName : dimensionNames) { + dims.add(new CacheStatsDimension(dimName, "0")); + } + return new ICacheKey<>(key, dims); + } + + private LoadAwareCacheLoader, String> getLoadAwareCacheLoader() { + return new LoadAwareCacheLoader<>() { + boolean isLoaded = false; + + @Override + public String load(ICacheKey key) { + isLoaded = true; + return UUID.randomUUID().toString(); + } + + @Override + public boolean isLoaded() { + return isLoaded; + } + }; + } +} From 2483981e6e04beeae940e337a9221a1dd7e7a90c Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Fri, 1 Mar 2024 10:24:22 -0800 Subject: [PATCH 29/32] Optimized multi dimension stats Signed-off-by: Peter Alfonsi --- .../cache/stats/MultiDimensionCacheStats.java | 76 +++++++++++++------ .../stats/MultiDimensionCacheStatsTests.java | 17 +++++ 2 files changed, 68 insertions(+), 25 deletions(-) diff --git a/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java index ba356335c11b4..370010dd7e282 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java @@ -13,6 +13,8 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -42,7 +44,7 @@ public class MultiDimensionCacheStats implements CacheStats { final String tierDimensionValue; // A map from a set of cache stats dimensions -> stats for that combination of dimensions. Does not include the tier dimension in its keys. 
- final ConcurrentMap, CacheStatsResponse> map; + final ConcurrentMap map; final int maxDimensionValues; CacheStatsResponse totalStats; @@ -62,11 +64,11 @@ public MultiDimensionCacheStats(List dimensionNames, String tierDimensio public MultiDimensionCacheStats(StreamInput in) throws IOException { this.dimensionNames = List.of(in.readStringArray()); this.tierDimensionValue = in.readString(); - Map, CacheStatsResponse> readMap = in.readMap( - i -> Set.of(i.readArray(CacheStatsDimension::new, CacheStatsDimension[]::new)), + Map readMap = in.readMap( + i -> new Key(Set.of(i.readArray(CacheStatsDimension::new, CacheStatsDimension[]::new))), CacheStatsResponse::new ); - this.map = new ConcurrentHashMap, CacheStatsResponse>(readMap); + this.map = new ConcurrentHashMap(readMap); this.totalStats = new CacheStatsResponse(in); this.maxDimensionValues = in.readVInt(); } @@ -77,7 +79,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(tierDimensionValue); out.writeMap( map, - (o, keySet) -> o.writeArray((o1, dim) -> ((CacheStatsDimension) dim).writeTo(o1), keySet.toArray()), + (o, key) -> o.writeArray((o1, dim) -> ((CacheStatsDimension) dim).writeTo(o1), key.dimensions.toArray()), (o, response) -> response.writeTo(o) ); totalStats.writeTo(out); @@ -89,6 +91,10 @@ public CacheStatsResponse getTotalStats() { return totalStats; } + /** + * Get the stats response aggregated by dimensions. If there are no values for the specified dimensions, + * returns an all-zero response. 
+ */ @Override public CacheStatsResponse getStatsByDimensions(List dimensions) { if (!checkDimensionNames(dimensions)) { @@ -103,12 +109,16 @@ public CacheStatsResponse getStatsByDimensions(List dimensi modifiedDimensions.remove(tierDim); } + if (modifiedDimensions.size() == dimensionNames.size()) { + return map.getOrDefault(new Key(modifiedDimensions), new CacheStatsResponse()); + } + // I don't think there's a more efficient way to get arbitrary combinations of dimensions than to just keep a map // and iterate through it, checking if keys match. We can't pre-aggregate because it would consume a lot of memory. CacheStatsResponse response = new CacheStatsResponse(); - for (Set storedDimensions : map.keySet()) { - if (storedDimensions.containsAll(modifiedDimensions)) { - response.add(map.get(storedDimensions)); + for (Key key : map.keySet()) { + if (key.dimensions.containsAll(modifiedDimensions)) { + response.add(map.get(key)); } } return response; @@ -128,27 +138,14 @@ private CacheStatsDimension getTierDimension(List dimension private boolean checkDimensionNames(List dimensions) { for (CacheStatsDimension dim : dimensions) { - if (!dimensionNames.contains(dim.dimensionName) && !dim.dimensionName.equals(CacheStatsDimension.TIER_DIMENSION_NAME)) { + if (!(dimensionNames.contains(dim.dimensionName) || dim.dimensionName.equals(CacheStatsDimension.TIER_DIMENSION_NAME))) { + // Reject dimension names that aren't in the list and aren't the tier dimension return false; } } return true; } - private CacheStatsResponse getStatsBySingleDimension(CacheStatsDimension dimension) { - assert dimensionNames.size() == 1; - CacheStatsResponse response = new CacheStatsResponse(); - for (Set dimensions : map.keySet()) { - // Each set has only one element - for (CacheStatsDimension keyDimension : dimensions) { - if (keyDimension.dimensionValue.equals(dimension.dimensionValue)) { - response.add(map.get(dimensions)); - } - } - } - return response; - } - @Override public long 
getTotalHits() { return totalStats.getHits(); @@ -231,11 +228,11 @@ public void decrementEntriesByDimensions(List dimensions) { private CacheStatsResponse internalGetStats(List dimensions) { assert dimensions.size() == dimensionNames.size(); - CacheStatsResponse response = map.get(new HashSet<>(dimensions)); + CacheStatsResponse response = map.get(new Key(dimensions)); if (response == null) { if (map.size() < maxDimensionValues) { response = new CacheStatsResponse(); - map.put(new HashSet<>(dimensions), response); + map.put(new Key(dimensions), response); } else { throw new RuntimeException("Cannot add new combination of dimension values to stats object; reached maximum"); } @@ -249,4 +246,33 @@ private void internalIncrement(List dimensions, BiConsumer< incrementer.accept(totalStats, amount); } + /** + * Unmodifiable wrapper over a set of CacheStatsDimension. Pkg-private for testing. + */ + static class Key { + final Set dimensions; + Key(Set dimensions) { + this.dimensions = Collections.unmodifiableSet(dimensions); + } + Key(List dimensions) { + this(new HashSet<>(dimensions)); + } + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } if (o == null) { + return false; + } if (o.getClass() != Key.class) { + return false; + } + Key other = (Key) o; + return this.dimensions.equals(other.dimensions); + } + + @Override + public int hashCode() { + return this.dimensions.hashCode(); + } + } } diff --git a/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java index 127b5c979f27a..7855a2d202246 100644 --- a/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java +++ b/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java @@ -149,6 +149,23 @@ public void testTierLogic() throws Exception { assertEquals(new CacheStatsResponse(), 
stats.getStatsByDimensions(List.of(wrongTierDim))); } + public void testKeyEquality() throws Exception { + Set dims1 = new HashSet<>(); + dims1.add(new CacheStatsDimension("a", "1")); + dims1.add(new CacheStatsDimension("b", "2")); + dims1.add(new CacheStatsDimension("c", "3")); + MultiDimensionCacheStats.Key key1 = new MultiDimensionCacheStats.Key(dims1); + + List dims2 = new ArrayList<>(); + dims2.add(new CacheStatsDimension("c", "3")); + dims2.add(new CacheStatsDimension("a", "1")); + dims2.add(new CacheStatsDimension("b", "2")); + MultiDimensionCacheStats.Key key2 = new MultiDimensionCacheStats.Key(dims2); + + assertEquals(key1, key2); + assertEquals(key1.hashCode(), key2.hashCode()); + } + private Map> getUsedDimensionValues(MultiDimensionCacheStats stats, int numValuesPerDim) { Map> usedDimensionValues = new HashMap<>(); for (int i = 0; i < stats.dimensionNames.size(); i++) { From 2aeaa53a5c2617d81e3de257c8340e6e537b1aad Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Fri, 1 Mar 2024 10:46:00 -0800 Subject: [PATCH 30/32] Added reset() to CacheStats Signed-off-by: Peter Alfonsi --- .../common/cache/stats/CacheStats.java | 3 +++ .../cache/stats/MultiDimensionCacheStats.java | 9 +++++++ .../cache/store/OpenSearchOnHeapCache.java | 2 +- .../stats/MultiDimensionCacheStatsTests.java | 25 +++++++++++++++++++ 4 files changed, 38 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java index 9d53585708dd7..bc5d9a0d1df8c 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java @@ -48,4 +48,7 @@ public interface CacheStats extends Writeable { void incrementEntriesByDimensions(List dimensions); void decrementEntriesByDimensions(List dimensions); + // Resets memory and entries stats but leaves the others; called when the cache clears 
itself. + void reset(); + } diff --git a/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java index 370010dd7e282..1460f6366c10f 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java @@ -226,6 +226,15 @@ public void decrementEntriesByDimensions(List dimensions) { internalIncrement(dimensions, (response, amount) -> response.entries.inc(amount), -1); } + @Override + public void reset() { + for (Key key : map.keySet()) { + CacheStatsResponse response = map.get(key); + response.memorySize.dec(response.getMemorySize()); + response.entries.dec(response.getEntries()); + } + } + private CacheStatsResponse internalGetStats(List dimensions) { assert dimensions.size() == dimensionNames.size(); CacheStatsResponse response = map.get(new Key(dimensions)); diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index 758ba360eb27c..6cb366ae48f33 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -102,7 +102,7 @@ public void invalidate(ICacheKey key) { @Override public void invalidateAll() { cache.invalidateAll(); - stats = new MultiDimensionCacheStats(dimensionNames, TIER_DIMENSION_VALUE); + stats.reset(); } @Override diff --git a/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java index 7855a2d202246..59102c768a858 100644 --- a/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java +++ 
b/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java @@ -10,6 +10,7 @@ import org.opensearch.common.Randomness; import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.metrics.CounterMetric; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.BytesStreamInput; import org.opensearch.test.OpenSearchTestCase; @@ -166,6 +167,30 @@ public void testKeyEquality() throws Exception { assertEquals(key1.hashCode(), key2.hashCode()); } + public void testReset() throws Exception { + List dimensionNames = List.of("dim1", "dim2"); + MultiDimensionCacheStats stats = new MultiDimensionCacheStats(dimensionNames, tierDimensionValue); + Map> usedDimensionValues = getUsedDimensionValues(stats, 10); + Map, CacheStatsResponse> expected = populateStats(stats, usedDimensionValues, 100, 10); + + stats.reset(); + + for (Set dimSet : expected.keySet()) { + List dims = new ArrayList<>(dimSet); + CacheStatsResponse originalResponse = expected.get(dimSet); + originalResponse.memorySize = new CounterMetric(); + originalResponse.entries = new CounterMetric(); + CacheStatsResponse actual = stats.getStatsByDimensions(dims); + assertEquals(originalResponse, actual); + + assertEquals(originalResponse.getHits(), stats.getHitsByDimensions(dims)); + assertEquals(originalResponse.getMisses(), stats.getMissesByDimensions(dims)); + assertEquals(originalResponse.getEvictions(), stats.getEvictionsByDimensions(dims)); + assertEquals(originalResponse.getMemorySize(), stats.getMemorySizeByDimensions(dims)); + assertEquals(originalResponse.getEntries(), stats.getEntriesByDimensions(dims)); + } + } + private Map> getUsedDimensionValues(MultiDimensionCacheStats stats, int numValuesPerDim) { Map> usedDimensionValues = new HashMap<>(); for (int i = 0; i < stats.dimensionNames.size(); i++) { From 60df76166c8f21c467508f2be367fcd05cbd76c9 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Fri, 1 Mar 
2024 10:53:18 -0800 Subject: [PATCH 31/32] Fixed reset impl for multi dim stats Signed-off-by: Peter Alfonsi --- .../cache/stats/MultiDimensionCacheStats.java | 2 ++ .../stats/MultiDimensionCacheStatsTests.java | 15 +++++++++++++++ 2 files changed, 17 insertions(+) diff --git a/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java index 1460f6366c10f..7546d129973a2 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java @@ -233,6 +233,8 @@ public void reset() { response.memorySize.dec(response.getMemorySize()); response.entries.dec(response.getEntries()); } + totalStats.memorySize.dec(totalStats.getMemorySize()); + totalStats.entries.dec(totalStats.getEntries()); } private CacheStatsResponse internalGetStats(List dimensions) { diff --git a/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java index 59102c768a858..03cd831de14d4 100644 --- a/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java +++ b/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java @@ -11,6 +11,7 @@ import org.opensearch.common.Randomness; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.metrics.CounterMetric; +import org.opensearch.common.recycler.Recycler; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.BytesStreamInput; import org.opensearch.test.OpenSearchTestCase; @@ -189,6 +190,20 @@ public void testReset() throws Exception { assertEquals(originalResponse.getMemorySize(), stats.getMemorySizeByDimensions(dims)); assertEquals(originalResponse.getEntries(), 
stats.getEntriesByDimensions(dims)); } + + CacheStatsResponse expectedTotal = new CacheStatsResponse(); + for (Set dimSet : expected.keySet()) { + expectedTotal.add(expected.get(dimSet)); + } + expectedTotal.memorySize = new CounterMetric(); + expectedTotal.entries = new CounterMetric(); + assertEquals(expectedTotal, stats.getTotalStats()); + + assertEquals(expectedTotal.getHits(), stats.getTotalHits()); + assertEquals(expectedTotal.getMisses(), stats.getTotalMisses()); + assertEquals(expectedTotal.getEvictions(), stats.getTotalEvictions()); + assertEquals(expectedTotal.getMemorySize(), stats.getTotalMemorySize()); + assertEquals(expectedTotal.getEntries(), stats.getTotalEntries()); } private Map> getUsedDimensionValues(MultiDimensionCacheStats stats, int numValuesPerDim) { From ea33af842ccb3ff45f50f2639c4e4ec8972e6105 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Fri, 1 Mar 2024 15:35:04 -0800 Subject: [PATCH 32/32] spotlessApply Signed-off-by: Peter Alfonsi --- .../common/tier/TieredSpilloverCache.java | 33 +++++++-------- .../tier/TieredSpilloverCacheTests.java | 2 +- .../opensearch/cache/EhcacheCachePlugin.java | 1 - .../cache/store/disk/EhcacheDiskCache.java | 42 ++++++++++++------- .../store/disk/EhCacheDiskCacheTests.java | 31 ++++++++++---- .../opensearch/common/cache/ICacheKey.java | 1 - .../common/cache/stats/CacheStats.java | 15 ++++++- .../cache/stats/CacheStatsDimension.java | 1 + .../cache/stats/CacheStatsResponse.java | 2 +- .../cache/stats/MultiDimensionCacheStats.java | 18 +++++--- .../cache/store/OpenSearchOnHeapCache.java | 20 ++++----- .../cache/store/builders/ICacheBuilder.java | 2 +- .../cache/store/config/CacheConfig.java | 2 +- .../indices/IRCKeyWriteableSerializer.java | 4 +- .../BytesReferenceSerializerTests.java | 1 - .../serializer/ICacheKeySerializerTests.java | 7 ++-- .../cache/stats/CacheStatsDimensionTests.java | 2 +- .../stats/MultiDimensionCacheStatsTests.java | 32 +++++++++++--- .../store/OpenSearchOnHeapCacheTests.java | 5 
++- .../IRCKeyWriteableSerializerTests.java | 6 +-- 20 files changed, 143 insertions(+), 84 deletions(-) diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java index c96e98a2a3be8..e8a3e7985703c 100644 --- a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java +++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java @@ -48,8 +48,8 @@ public class TieredSpilloverCache implements ICache { private final ICache onHeapCache; // TODO: Listeners for removals from the two tiers - //private final RemovalListener, V> onDiskRemovalListener; - //private final RemovalListener, V> onHeapRemovalListener; + // private final RemovalListener, V> onDiskRemovalListener; + // private final RemovalListener, V> onHeapRemovalListener; // The listener for removals from the spillover cache as a whole private final RemovalListener, V> removalListener; @@ -67,21 +67,20 @@ public class TieredSpilloverCache implements ICache { Objects.requireNonNull(builder.diskCacheFactory, "disk cache builder can't be null"); this.removalListener = Objects.requireNonNull(builder.removalListener, "Removal listener can't be null"); - this.onHeapCache = builder.onHeapCacheFactory.create( - new CacheConfig.Builder().setRemovalListener(new RemovalListener<>() { - @Override - public void onRemoval(RemovalNotification, V> notification) { - try (ReleasableLock ignore = writeLock.acquire()) { - diskCache.put(notification.getKey(), notification.getValue()); - } - removalListener.onRemoval(notification); - } - }) - .setKeyType(builder.cacheConfig.getKeyType()) - .setValueType(builder.cacheConfig.getValueType()) - .setSettings(builder.cacheConfig.getSettings()) - .setWeigher(builder.cacheConfig.getWeigher()) - .build(), + this.onHeapCache = builder.onHeapCacheFactory.create(new 
CacheConfig.Builder().setRemovalListener(new RemovalListener<>() { + @Override + public void onRemoval(RemovalNotification, V> notification) { + try (ReleasableLock ignore = writeLock.acquire()) { + diskCache.put(notification.getKey(), notification.getValue()); + } + removalListener.onRemoval(notification); + } + }) + .setKeyType(builder.cacheConfig.getKeyType()) + .setValueType(builder.cacheConfig.getValueType()) + .setSettings(builder.cacheConfig.getSettings()) + .setWeigher(builder.cacheConfig.getWeigher()) + .build(), builder.cacheType, builder.cacheFactories diff --git a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java index c1f5abe5a2fe1..e1a48ec6fdae9 100644 --- a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java +++ b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java @@ -30,7 +30,7 @@ import static org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings.MAXIMUM_SIZE_IN_BYTES_KEY; public class TieredSpilloverCacheTests extends OpenSearchTestCase { -// TODO: TSC has no stats implementation yet - fix these tests once it does + // TODO: These tests are uncommented in the second stats rework PR, which adds a TSC stats implementation /*public void testComputeIfAbsentWithoutAnyOnHeapCacheEviction() throws Exception { int onHeapCacheSize = randomIntBetween(10, 30); int keyValueSize = 50; diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java index 510130184397a..ceda96e4a7d7d 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java @@ -16,7 +16,6 @@ import 
org.opensearch.plugins.Plugin; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index e769f0e0d7ca1..fcc661fd16676 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -10,25 +10,22 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.spi.serialization.SerializerException; import org.opensearch.OpenSearchException; import org.opensearch.cache.EhcacheDiskCacheSettings; import org.opensearch.common.SuppressForbidden; import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.cache.RemovalReason; +import org.opensearch.common.cache.serializer.ICacheKeySerializer; +import org.opensearch.common.cache.serializer.Serializer; import org.opensearch.common.cache.stats.CacheStats; -import org.opensearch.common.cache.ICacheKey; -import org.opensearch.common.cache.stats.CacheStatsDimension; import org.opensearch.common.cache.stats.MultiDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; -import org.opensearch.common.cache.serializer.ICacheKeySerializer; -import org.opensearch.common.cache.serializer.Serializer; import org.opensearch.common.cache.store.config.CacheConfig; import org.opensearch.common.collect.Tuple; 
import org.opensearch.common.settings.Setting; @@ -60,6 +57,7 @@ import org.ehcache.config.builders.PooledExecutionServiceConfigurationBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.spi.service.FileBasedPersistenceContext; import org.ehcache.event.CacheEvent; import org.ehcache.event.CacheEventListener; import org.ehcache.event.EventType; @@ -67,6 +65,7 @@ import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; import org.ehcache.spi.loaderwriter.CacheLoadingException; import org.ehcache.spi.loaderwriter.CacheWritingException; +import org.ehcache.spi.serialization.SerializerException; import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_CACHE_ALIAS_KEY; import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY; @@ -153,7 +152,8 @@ private EhcacheDiskCache(Builder builder) { this.ehCacheEventListener = new EhCacheEventListener( Objects.requireNonNull(builder.getRemovalListener(), "Removal listener can't be null"), Objects.requireNonNull(builder.getWeigher(), "Weigher function can't be null"), - this.valueSerializer); + this.valueSerializer + ); this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); List dimensionNames = Objects.requireNonNull(builder.dimensionNames, "Dimension names can't be null"); this.stats = new MultiDimensionCacheStats(dimensionNames, TIER_DIMENSION_VALUE); @@ -456,9 +456,11 @@ class EhCacheEventListener implements CacheEventListener, byt private ToLongBiFunction, V> weigher; private Serializer valueSerializer; - EhCacheEventListener(RemovalListener, V> removalListener, - ToLongBiFunction, V> weigher, - Serializer valueSerializer) { + EhCacheEventListener( + RemovalListener, V> removalListener, + ToLongBiFunction, V> weigher, + Serializer valueSerializer + ) { this.removalListener = removalListener; this.weigher = weigher; this.valueSerializer = valueSerializer; @@ 
-481,20 +483,30 @@ public void onEvent(CacheEvent, ? extends byte[]> event) assert event.getOldValue() == null; break; case EVICTED: - this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), valueSerializer.deserialize(event.getOldValue()), RemovalReason.EVICTED)); + this.removalListener.onRemoval( + new RemovalNotification<>(event.getKey(), valueSerializer.deserialize(event.getOldValue()), RemovalReason.EVICTED) + ); stats.decrementEntriesByDimensions(event.getKey().dimensions); stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getOldValuePairSize(event)); stats.incrementEvictionsByDimensions(event.getKey().dimensions); assert event.getNewValue() == null; break; case REMOVED: - this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), valueSerializer.deserialize(event.getOldValue()), RemovalReason.EXPLICIT)); + this.removalListener.onRemoval( + new RemovalNotification<>(event.getKey(), valueSerializer.deserialize(event.getOldValue()), RemovalReason.EXPLICIT) + ); stats.decrementEntriesByDimensions(event.getKey().dimensions); stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getOldValuePairSize(event)); assert event.getNewValue() == null; break; case EXPIRED: - this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), valueSerializer.deserialize(event.getOldValue()), RemovalReason.INVALIDATED)); + this.removalListener.onRemoval( + new RemovalNotification<>( + event.getKey(), + valueSerializer.deserialize(event.getOldValue()), + RemovalReason.INVALIDATED + ) + ); stats.decrementEntriesByDimensions(event.getKey().dimensions); stats.incrementMemorySizeByDimensions(event.getKey().dimensions, -getOldValuePairSize(event)); assert event.getNewValue() == null; @@ -512,6 +524,7 @@ public void onEvent(CacheEvent, ? 
extends byte[]> event) private class KeySerializerWrapper implements org.ehcache.spi.serialization.Serializer { private ICacheKeySerializer serializer; + public KeySerializerWrapper(Serializer internalKeySerializer) { this.serializer = new ICacheKeySerializer<>(internalKeySerializer); } @@ -520,6 +533,7 @@ public KeySerializerWrapper(Serializer internalKeySerializer) { // cache after a restart. // See https://www.ehcache.org/documentation/3.0/serializers-copiers.html#persistent-vs-transient-caches public KeySerializerWrapper(ClassLoader classLoader, FileBasedPersistenceContext persistenceContext) {} + @Override public ByteBuffer serialize(ICacheKey object) throws SerializerException { return ByteBuffer.wrap(serializer.serialize(object)); @@ -705,7 +719,7 @@ public Builder setValueSerializer(Serializer valueSerializer) { return this; } - //@Override + // @Override public EhcacheDiskCache build() { return new EhcacheDiskCache<>(this); } diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index a25cde40396a6..20230878dbf89 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -11,12 +11,12 @@ import org.opensearch.cache.EhcacheDiskCacheSettings; import org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; -import org.opensearch.common.cache.stats.CacheStatsDimension; -import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.serializer.Serializer; +import 
org.opensearch.common.cache.stats.CacheStatsDimension; import org.opensearch.common.cache.store.config.CacheConfig; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; @@ -39,9 +39,9 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.ToLongBiFunction; -import static org.hamcrest.CoreMatchers.instanceOf; import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_MAX_SIZE_IN_BYTES_KEY; import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_STORAGE_PATH_KEY; +import static org.hamcrest.CoreMatchers.instanceOf; public class EhCacheDiskCacheTests extends OpenSearchSingleNodeTestCase { @@ -500,7 +500,7 @@ public void testComputeIfAbsentWithNullValueLoading() throws Exception { // Try to hit different request with the same key concurrently. Loader throws exception. for (int i = 0; i < numberOfRequest; i++) { threads[i] = new Thread(() -> { - LoadAwareCacheLoader, String> loadAwareCacheLoader = new LoadAwareCacheLoader<>() { + LoadAwareCacheLoader, String> loadAwareCacheLoader = new LoadAwareCacheLoader<>() { boolean isLoaded; @Override @@ -543,7 +543,10 @@ public void testMemoryTracking() throws Exception { ToLongBiFunction, String> weigher = getWeigher(); int initialKeyLength = 40; int initialValueLength = 40; - long sizeForOneInitialEntry = weigher.applyAsLong(new ICacheKey<>(generateRandomString(initialKeyLength), getMockDimensions()), generateRandomString(initialValueLength)); + long sizeForOneInitialEntry = weigher.applyAsLong( + new ICacheKey<>(generateRandomString(initialKeyLength), getMockDimensions()), + generateRandomString(initialValueLength) + ); int maxEntries = 2000; try (NodeEnvironment env = newNodeEnvironment(settings)) { ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") @@ -603,7 +606,7 @@ public void testMemoryTracking() throws Exception { } // TODO: Ehcache incorrectly evicts at 30-40% of max size. Fix this test once we figure out why. 
// Since the EVICTED and EXPIRED cases use the same code as REMOVED, we should be ok on testing them for now. - //assertEquals(maxEntries * sizeForOneInitialEntry, ehcacheTest.stats().getTotalMemorySize()); + // assertEquals(maxEntries * sizeForOneInitialEntry, ehcacheTest.stats().getTotalMemorySize()); ehcacheTest.close(); } @@ -632,8 +635,18 @@ public void testGetStatsByTierName() throws Exception { for (int i = 0; i < randomKeys; i++) { ehcacheTest.put(getICacheKey(UUID.randomUUID().toString()), UUID.randomUUID().toString()); } - assertEquals(randomKeys, ehcacheTest.stats().getEntriesByDimensions(List.of(new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, EhcacheDiskCache.TIER_DIMENSION_VALUE)))); - assertEquals(0, ehcacheTest.stats().getEntriesByDimensions(List.of(new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, "other_tier_value")))); + assertEquals( + randomKeys, + ehcacheTest.stats() + .getEntriesByDimensions( + List.of(new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, EhcacheDiskCache.TIER_DIMENSION_VALUE)) + ) + ); + assertEquals( + 0, + ehcacheTest.stats() + .getEntriesByDimensions(List.of(new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, "other_tier_value"))) + ); ehcacheTest.close(); } @@ -676,6 +689,7 @@ private ToLongBiFunction, String> getWeigher() { class MockRemovalListener implements RemovalListener, V> { AtomicInteger onRemovalCount = new AtomicInteger(); + @Override public void onRemoval(RemovalNotification, V> notification) { onRemovalCount.incrementAndGet(); @@ -684,6 +698,7 @@ public void onRemoval(RemovalNotification, V> notification) { static class StringSerializer implements Serializer { private final Charset charset = StandardCharsets.UTF_8; + @Override public byte[] serialize(String object) { return object.getBytes(charset); diff --git a/server/src/main/java/org/opensearch/common/cache/ICacheKey.java b/server/src/main/java/org/opensearch/common/cache/ICacheKey.java index 
8acf0352f25d2..51cb1712873c1 100644 --- a/server/src/main/java/org/opensearch/common/cache/ICacheKey.java +++ b/server/src/main/java/org/opensearch/common/cache/ICacheKey.java @@ -8,7 +8,6 @@ package org.opensearch.common.cache; - import org.opensearch.common.cache.stats.CacheStatsDimension; import java.util.List; diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java index bc5d9a0d1df8c..7b24e3412c1f6 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java @@ -22,30 +22,43 @@ public interface CacheStats extends Writeable { // Methods to get all 5 values at once, either in total or for a specific set of dimensions. CacheStatsResponse getTotalStats(); + CacheStatsResponse getStatsByDimensions(List dimensions); // Methods to get total values. long getTotalHits(); + long getTotalMisses(); + long getTotalEvictions(); + long getTotalMemorySize(); + long getTotalEntries(); // Methods to get values for a specific set of dimensions. // Returns the sum of values for cache entries that match all dimensions in the list. 
long getHitsByDimensions(List dimensions); + long getMissesByDimensions(List dimensions); + long getEvictionsByDimensions(List dimensions); + long getMemorySizeByDimensions(List dimensions); - long getEntriesByDimensions(List dimensions); + long getEntriesByDimensions(List dimensions); void incrementHitsByDimensions(List dimensions); + void incrementMissesByDimensions(List dimensions); + void incrementEvictionsByDimensions(List dimensions); + // Can also use to decrement, with negative values void incrementMemorySizeByDimensions(List dimensions, long amountBytes); + void incrementEntriesByDimensions(List dimensions); + void decrementEntriesByDimensions(List dimensions); // Resets memory and entries stats but leaves the others; called when the cache clears itself. diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java index 5ba83122f7fd2..9aee24efb46f0 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsDimension.java @@ -20,6 +20,7 @@ public class CacheStatsDimension implements Writeable { public static final String TIER_DIMENSION_NAME = "tier"; public final String dimensionName; public final String dimensionValue; + public CacheStatsDimension(String dimensionName, String dimensionValue) { this.dimensionName = dimensionName; this.dimensionValue = dimensionValue; diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java index fededa326d3e5..520a771510c43 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStatsResponse.java @@ -44,7 +44,7 @@ public CacheStatsResponse(StreamInput in) throws IOException { } public 
CacheStatsResponse() { - this(0,0,0,0,0); + this(0, 0, 0, 0, 0); } public synchronized void add(CacheStatsResponse other) { diff --git a/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java index 7546d129973a2..1f977a7c040b3 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/MultiDimensionCacheStats.java @@ -13,7 +13,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -43,7 +42,8 @@ public class MultiDimensionCacheStats implements CacheStats { // Package-private for testing. final String tierDimensionValue; - // A map from a set of cache stats dimensions -> stats for that combination of dimensions. Does not include the tier dimension in its keys. + // A map from a set of cache stats dimensions -> stats for that combination of dimensions. Does not include the tier dimension in its + // keys. 
final ConcurrentMap map; final int maxDimensionValues; @@ -81,7 +81,7 @@ public void writeTo(StreamOutput out) throws IOException { map, (o, key) -> o.writeArray((o1, dim) -> ((CacheStatsDimension) dim).writeTo(o1), key.dimensions.toArray()), (o, response) -> response.writeTo(o) - ); + ); totalStats.writeTo(out); out.writeVInt(maxDimensionValues); } @@ -103,7 +103,8 @@ public CacheStatsResponse getStatsByDimensions(List dimensi CacheStatsDimension tierDim = getTierDimension(dimensions); if (tierDim == null || tierDim.dimensionValue.equals(tierDimensionValue)) { - // If there is no tier dimension, or if the tier dimension value matches the one for this stats object, return an aggregated response over the non-tier dimensions + // If there is no tier dimension, or if the tier dimension value matches the one for this stats object, return an aggregated + // response over the non-tier dimensions List modifiedDimensions = new ArrayList<>(dimensions); if (tierDim != null) { modifiedDimensions.remove(tierDim); @@ -262,19 +263,24 @@ private void internalIncrement(List dimensions, BiConsumer< */ static class Key { final Set dimensions; + Key(Set dimensions) { this.dimensions = Collections.unmodifiableSet(dimensions); } + Key(List dimensions) { this(new HashSet<>(dimensions)); } + @Override public boolean equals(Object o) { if (o == this) { return true; - } if (o == null) { + } + if (o == null) { return false; - } if (o.getClass() != Key.class) { + } + if (o.getClass() != Key.class) { return false; } Key other = (Key) o; diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index 6cb366ae48f33..e242d084ec2a7 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -12,26 +12,23 @@ import 
org.opensearch.common.cache.CacheBuilder; import org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; -import org.opensearch.common.cache.ICacheKey; import org.opensearch.common.cache.stats.MultiDimensionCacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.config.CacheConfig; -import org.opensearch.common.settings.Settings; - -import java.util.List; -import java.util.Objects; - import org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings; import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; import org.opensearch.core.common.unit.ByteSizeValue; +import java.util.List; import java.util.Map; -import java.util.function.ToLongBiFunction; +import java.util.Objects; import static org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings.MAXIMUM_SIZE_IN_BYTES_KEY; @@ -132,7 +129,10 @@ public CacheStats stats() { public void onRemoval(RemovalNotification, V> notification) { removalListener.onRemoval(notification); stats.decrementEntriesByDimensions(notification.getKey().dimensions); - stats.incrementMemorySizeByDimensions(notification.getKey().dimensions, -cache.getWeigher().applyAsLong(notification.getKey(), notification.getValue())); + stats.incrementMemorySizeByDimensions( + notification.getKey().dimensions, + -cache.getWeigher().applyAsLong(notification.getKey(), notification.getValue()) + ); if (RemovalReason.EVICTED.equals(notification.getRemovalReason()) || RemovalReason.CAPACITY.equals(notification.getRemovalReason())) { @@ -151,8 +151,7 @@ public static class OpenSearchOnHeapCacheFactory 
implements Factory { public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) { Map> settingList = OpenSearchOnHeapCacheSettings.getSettingListForCacheType(cacheType); Settings settings = config.getSettings(); - return new Builder() - .setDimensionNames(config.getDimensionNames()) + return new Builder().setDimensionNames(config.getDimensionNames()) .setMaximumWeightInBytes(((ByteSizeValue) settingList.get(MAXIMUM_SIZE_IN_BYTES_KEY).get(settings)).getBytes()) .setWeigher(config.getWeigher()) .setRemovalListener(config.getRemovalListener()) @@ -178,6 +177,7 @@ public Builder setDimensionNames(List dimensionNames) { this.dimensionNames = dimensionNames; return this; } + @Override public ICache build() { return new OpenSearchOnHeapCache(this); diff --git a/server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java b/server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java index 02b3c4f802536..3fc43767a03e7 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java +++ b/server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java @@ -10,8 +10,8 @@ import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.ICache; -import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.ICacheKey; +import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; diff --git a/server/src/main/java/org/opensearch/common/cache/store/config/CacheConfig.java b/server/src/main/java/org/opensearch/common/cache/store/config/CacheConfig.java index 0bf325cdd5a86..2c2a93ae9ab67 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/config/CacheConfig.java +++ b/server/src/main/java/org/opensearch/common/cache/store/config/CacheConfig.java @@ -9,8 +9,8 @@ package org.opensearch.common.cache.store.config; import 
org.opensearch.common.annotation.ExperimentalApi; -import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.ICacheKey; +import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.serializer.Serializer; import org.opensearch.common.settings.Settings; diff --git a/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java b/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java index 56d16e84f3f42..b83957d4a2508 100644 --- a/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java +++ b/server/src/main/java/org/opensearch/indices/IRCKeyWriteableSerializer.java @@ -22,9 +22,7 @@ */ public class IRCKeyWriteableSerializer implements Serializer { - - public IRCKeyWriteableSerializer() { - } + public IRCKeyWriteableSerializer() {} @Override public byte[] serialize(IndicesRequestCache.Key object) { diff --git a/server/src/test/java/org/opensearch/common/cache/serializer/BytesReferenceSerializerTests.java b/server/src/test/java/org/opensearch/common/cache/serializer/BytesReferenceSerializerTests.java index d316cfe9630ae..b1d9e762d5df7 100644 --- a/server/src/test/java/org/opensearch/common/cache/serializer/BytesReferenceSerializerTests.java +++ b/server/src/test/java/org/opensearch/common/cache/serializer/BytesReferenceSerializerTests.java @@ -10,7 +10,6 @@ import org.opensearch.common.Randomness; import org.opensearch.common.bytes.ReleasableBytesReference; -import org.opensearch.common.cache.serializer.BytesReferenceSerializer; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.core.common.bytes.BytesArray; diff --git a/server/src/test/java/org/opensearch/common/cache/serializer/ICacheKeySerializerTests.java b/server/src/test/java/org/opensearch/common/cache/serializer/ICacheKeySerializerTests.java index e3875b64c3818..968d9dd64b01d 100644 --- 
a/server/src/test/java/org/opensearch/common/cache/serializer/ICacheKeySerializerTests.java +++ b/server/src/test/java/org/opensearch/common/cache/serializer/ICacheKeySerializerTests.java @@ -9,10 +9,8 @@ package org.opensearch.common.cache.serializer; import org.opensearch.common.Randomness; -import org.opensearch.common.cache.serializer.BytesReferenceSerializer; -import org.opensearch.common.cache.serializer.ICacheKeySerializer; -import org.opensearch.common.cache.stats.CacheStatsDimension; import org.opensearch.common.cache.ICacheKey; +import org.opensearch.common.cache.stats.CacheStatsDimension; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.test.OpenSearchTestCase; @@ -44,7 +42,7 @@ public void testDimNumbers() throws Exception { BytesReferenceSerializer keySer = new BytesReferenceSerializer(); ICacheKeySerializer serializer = new ICacheKeySerializer<>(keySer); - for (int numDims : new int[]{0, 5, 1000}) { + for (int numDims : new int[] { 0, 5, 1000 }) { List dims = new ArrayList<>(); for (int j = 0; j < numDims; j++) { dims.add(getRandomDim()); @@ -64,6 +62,7 @@ public void testHashCodes() throws Exception { assertEquals(key1, key2); assertEquals(key1.hashCode(), key2.hashCode()); } + public void testNullInputs() throws Exception { BytesReferenceSerializer keySer = new BytesReferenceSerializer(); ICacheKeySerializer serializer = new ICacheKeySerializer<>(keySer); diff --git a/server/src/test/java/org/opensearch/common/cache/stats/CacheStatsDimensionTests.java b/server/src/test/java/org/opensearch/common/cache/stats/CacheStatsDimensionTests.java index 0af171191cdd4..21c0c46991be5 100644 --- a/server/src/test/java/org/opensearch/common/cache/stats/CacheStatsDimensionTests.java +++ b/server/src/test/java/org/opensearch/common/cache/stats/CacheStatsDimensionTests.java @@ -34,7 +34,7 @@ public void testEquality() throws Exception { String value = "dimension_value"; 
CacheStatsDimension dim = new CacheStatsDimension(name, value); assertEquals(dim, new CacheStatsDimension(name, value)); - assertNotEquals(dim, new CacheStatsDimension("a", "b")); + assertNotEquals(dim, new CacheStatsDimension("a", "b")); assertNotEquals(dim, null); assertNotEquals(dim, new CacheStatsDimension(null, null)); } diff --git a/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java b/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java index 03cd831de14d4..63f747d63ff08 100644 --- a/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java +++ b/server/src/test/java/org/opensearch/common/cache/stats/MultiDimensionCacheStatsTests.java @@ -11,7 +11,6 @@ import org.opensearch.common.Randomness; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.metrics.CounterMetric; -import org.opensearch.common.recycler.Recycler; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.BytesStreamInput; import org.opensearch.test.OpenSearchTestCase; @@ -66,7 +65,12 @@ public void testAddAndGet() throws Exception { // test gets for aggregations of values: for example, dim1="a", dim2="b", but dim3 and dim4 can be anything // test a random subset of these, there are combinatorially many possibilities for (int i = 0; i < 1000; i++) { - List aggregationDims = getRandomDimList(stats.dimensionNames, usedDimensionValues, false, Randomness.get()); + List aggregationDims = getRandomDimList( + stats.dimensionNames, + usedDimensionValues, + false, + Randomness.get() + ); CacheStatsResponse expectedResponse = new CacheStatsResponse(); for (Set dimSet : expected.keySet()) { if (dimSet.containsAll(aggregationDims)) { @@ -131,7 +135,12 @@ public void testTierLogic() throws Exception { CacheStatsDimension wrongTierDim = new CacheStatsDimension(CacheStatsDimension.TIER_DIMENSION_NAME, "wrong_value"); for (int 
i = 0; i < 1000; i++) { - List aggregationDims = getRandomDimList(stats.dimensionNames, usedDimensionValues, false, Randomness.get()); + List aggregationDims = getRandomDimList( + stats.dimensionNames, + usedDimensionValues, + false, + Randomness.get() + ); List aggDimsWithTier = new ArrayList<>(aggregationDims); aggDimsWithTier.add(tierDim); @@ -218,7 +227,12 @@ private Map> getUsedDimensionValues(MultiDimensionCacheStat return usedDimensionValues; } - private Map, CacheStatsResponse> populateStats(MultiDimensionCacheStats stats, Map> usedDimensionValues, int numDistinctValuePairs, int numRepetitionsPerValue) { + private Map, CacheStatsResponse> populateStats( + MultiDimensionCacheStats stats, + Map> usedDimensionValues, + int numDistinctValuePairs, + int numRepetitionsPerValue + ) { Map, CacheStatsResponse> expected = new HashMap<>(); Random rand = Randomness.get(); @@ -272,10 +286,16 @@ private Map, CacheStatsResponse> populateStats(MultiDim return expected; } - private List getRandomDimList(List dimensionNames, Map> usedDimensionValues, boolean pickValueForAllDims, Random rand) { + private List getRandomDimList( + List dimensionNames, + Map> usedDimensionValues, + boolean pickValueForAllDims, + Random rand + ) { List result = new ArrayList<>(); for (String dimName : dimensionNames) { - if (pickValueForAllDims || rand.nextBoolean()) { // if pickValueForAllDims, always pick a value for each dimension, otherwise do so 50% of the time + if (pickValueForAllDims || rand.nextBoolean()) { // if pickValueForAllDims, always pick a value for each dimension, otherwise do + // so 50% of the time int index = between(0, usedDimensionValues.get(dimName).size() - 1); result.add(new CacheStatsDimension(dimName, usedDimensionValues.get(dimName).get(index))); } diff --git a/server/src/test/java/org/opensearch/common/cache/store/OpenSearchOnHeapCacheTests.java b/server/src/test/java/org/opensearch/common/cache/store/OpenSearchOnHeapCacheTests.java index 22d8813aede19..b02195b67437d 
100644 --- a/server/src/test/java/org/opensearch/common/cache/store/OpenSearchOnHeapCacheTests.java +++ b/server/src/test/java/org/opensearch/common/cache/store/OpenSearchOnHeapCacheTests.java @@ -30,6 +30,7 @@ public class OpenSearchOnHeapCacheTests extends OpenSearchTestCase { private final static long keyValueSize = 50; private final static List dimensionNames = List.of("dim1", "dim2"); + public void testStats() throws Exception { MockRemovalListener listener = new MockRemovalListener<>(); int maxKeys = between(10, 50); @@ -85,8 +86,7 @@ private OpenSearchOnHeapCache getCache(int maxSizeKeys, MockRemo ) .build(); - CacheConfig cacheConfig = new CacheConfig.Builder() - .setKeyType(String.class) + CacheConfig cacheConfig = new CacheConfig.Builder().setKeyType(String.class) .setValueType(String.class) .setWeigher((k, v) -> keyValueSize) .setRemovalListener(listener) @@ -98,6 +98,7 @@ private OpenSearchOnHeapCache getCache(int maxSizeKeys, MockRemo private static class MockRemovalListener implements RemovalListener, V> { CounterMetric numRemovals; + MockRemovalListener() { numRemovals = new CounterMetric(); } diff --git a/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java b/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java index a4a97b5c8470f..af657dadd7a1a 100644 --- a/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java +++ b/server/src/test/java/org/opensearch/indices/IRCKeyWriteableSerializerTests.java @@ -39,11 +39,7 @@ public void testSerializer() throws Exception { } } - private IndicesRequestCache.Key getRandomIRCKey( - int valueLength, - Random random, - ShardId shard - ) { + private IndicesRequestCache.Key getRandomIRCKey(int valueLength, Random random, ShardId shard) { byte[] value = new byte[valueLength]; for (int i = 0; i < valueLength; i++) { value[i] = (byte) (random.nextInt(126 - 32) + 32);