diff --git a/README.md b/README.md
index fe2d25a3..45837724 100644
--- a/README.md
+++ b/README.md
@@ -25,7 +25,7 @@ Both of these features ensure that our library can be seamlessly integrated into
To include in your project:
##### Gradle
```groovy
-implementation 'com.cedarsoftware:java-util:2.10.0'
+implementation 'com.cedarsoftware:java-util:2.11.0'
```
##### Maven
@@ -33,7 +33,7 @@ implementation 'com.cedarsoftware:java-util:2.10.0'
com.cedarsoftware
java-util
- 2.10.0
+ 2.11.0
```
---
diff --git a/changelog.md b/changelog.md
index 0e96e333..151a41d1 100644
--- a/changelog.md
+++ b/changelog.md
@@ -1,4 +1,6 @@
### Revision History
+* 2.11.0
+ * `LRUCache` re-written so that it operates in O(1) for `get()`, `put()`, and `remove()` methods without thread contention. When items are placed into (or removed from) the cache, it schedules a cleanup task to trim the cache to its capacity. This means that it will operate as fast as a `ConcurrentHashMap`, yet shrink to capacity quickly after modifications.
* 2.10.0
* Fixed potential memory leak in `LRUCache.`
* Added `nextPermutation` to `MathUtilities.`
diff --git a/pom.xml b/pom.xml
index e5e77eb4..5d80af63 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,7 +5,7 @@
com.cedarsoftware
java-util
bundle
- 2.10.0
+ 2.11.0
Java Utilities
https://github.com/jdereg/java-util
diff --git a/src/main/java/com/cedarsoftware/util/LRUCache.java b/src/main/java/com/cedarsoftware/util/LRUCache.java
index 8ea9de82..f286b667 100644
--- a/src/main/java/com/cedarsoftware/util/LRUCache.java
+++ b/src/main/java/com/cedarsoftware/util/LRUCache.java
@@ -1,21 +1,28 @@
package com.cedarsoftware.util;
+import java.lang.ref.WeakReference;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
-import java.util.LinkedHashMap;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
import java.util.Map;
-import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
/**
* This class provides a thread-safe Least Recently Used (LRU) cache API that will evict the least recently used items,
* once a threshold is met. It implements the Map interface for convenience. It is thread-safe via usage of
- * ReentrantReadWriteLock() around read and write APIs, including delegating to keySet(), entrySet(), and
- * values() and each of their iterators.
+ * ConcurrentHashMap for internal storage. The .get(), .remove(), and .put() APIs operate in O(1) without any
+ * blocking. A background thread monitors and cleans up the internal Map if it exceeds capacity. In addition, a
+ * .put() that pushes the size past capacity causes the background cleanup to be scheduled immediately. This will keep the size of the LRUCache
+ * close to capacity even with bursty loads without reducing insertion (put) performance.
*
* @author John DeRegnaucourt (jdereg@gmail.com)
*
@@ -34,219 +41,169 @@
* limitations under the License.
*/
public class LRUCache extends AbstractMap implements Map {
- private final Map map;
- private final Node head;
- private final Node tail;
+ private static final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
+ private static final long DELAY = 10; // 10 ms delay before the cleanup task runs (scheduled with TimeUnit.MILLISECONDS)
private final int capacity;
- private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+ private final ConcurrentHashMap> cache;
+ private volatile boolean cleanupScheduled = false;
- private class Node {
- K key;
- V value;
- Node prev;
- Node next;
+ private static class Node {
+ final K key;
+ volatile V value;
+ volatile long timestamp;
Node(K key, V value) {
this.key = key;
this.value = value;
+ this.timestamp = System.nanoTime();
}
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- Node node = (Node) o;
- return Objects.equals(key, node.key) && Objects.equals(value, node.value);
- }
-
- public int hashCode() {
- return Objects.hash(key, value);
- }
-
- public String toString() {
- return "Node{" +
- "key=" + key +
- ", value=" + value +
- '}';
+ void updateTimestamp() {
+ this.timestamp = System.nanoTime();
}
}
public LRUCache(int capacity) {
this.capacity = capacity;
- this.map = new ConcurrentHashMap<>(capacity);
- this.head = new Node(null, null);
- this.tail = new Node(null, null);
- head.next = tail;
- tail.prev = head;
+ this.cache = new ConcurrentHashMap<>(capacity);
+ }
+
+ private void dynamicCleanup() {
+ int size = cache.size();
+ if (size > capacity) {
+ List> nodes = new ArrayList<>(cache.values());
+ nodes.sort(Comparator.comparingLong(node -> node.timestamp));
+ int nodesToRemove = size - capacity;
+ for (int i = 0; i < nodesToRemove; i++) {
+ Node node = nodes.get(i);
+ cache.remove(node.key, node);
+ }
+ }
+ cleanupScheduled = false; // Reset the flag after cleanup
+ // Check if another cleanup is needed after the current one
+ if (cache.size() > capacity) {
+ scheduleCleanup();
+ }
}
+ @Override
public V get(Object key) {
- lock.readLock().lock();
- try {
- Node node = map.get(key);
- if (node == null) {
- return null;
- }
- moveToHead(node);
+ Node node = cache.get(key);
+ if (node != null) {
+ node.updateTimestamp();
return node.value;
- } finally {
- lock.readLock().unlock();
}
+ return null;
}
+ @Override
public V put(K key, V value) {
- lock.writeLock().lock();
- try {
- Node newNode = new Node(key, value);
- Node oldNode = map.put(key, newNode);
-
- if (oldNode != null) {
- removeNode(oldNode);
- }
-
- addToHead(newNode);
-
- if (map.size() > capacity) {
- Node oldestNode = removeTailNode();
- if (oldestNode != null) {
- map.remove(oldestNode.key);
- }
- }
-
- return oldNode != null ? oldNode.value : null;
- } finally {
- lock.writeLock().unlock();
+ Node newNode = new Node<>(key, value);
+ Node oldNode = cache.put(key, newNode);
+ if (oldNode != null) {
+ newNode.updateTimestamp();
+ return oldNode.value;
+ } else {
+ scheduleCleanup();
+ return null;
}
}
+ @Override
public V remove(Object key) {
- lock.writeLock().lock();
- try {
- Node node = map.remove(key);
- if (node != null) {
- removeNode(node);
- return node.value;
- }
- return null;
- } finally {
- lock.writeLock().unlock();
+ Node node = cache.remove(key);
+ if (node != null) {
+ scheduleCleanup();
+ return node.value;
}
+ return null;
}
+ @Override
public void clear() {
- lock.writeLock().lock();
- try {
- map.clear();
- head.next = tail;
- tail.prev = head;
- } finally {
- lock.writeLock().unlock();
- }
+ cache.clear();
}
+ @Override
public int size() {
- return map.size();
+ return cache.size();
}
+ @Override
public boolean containsKey(Object key) {
- return map.containsKey(key);
+ return cache.containsKey(key);
}
+ @Override
public boolean containsValue(Object value) {
- for (Node node : map.values()) {
- if (Objects.equals(node.value, value)) {
+ for (Node node : cache.values()) {
+ if (node.value.equals(value)) {
return true;
}
}
return false;
}
+ @Override
public Set> entrySet() {
- Map result = new LinkedHashMap<>();
- for (Node node : map.values()) {
- result.put(node.key, node.value);
+ Set> entrySet = Collections.newSetFromMap(new ConcurrentHashMap<>());
+ for (Node node : cache.values()) {
+ entrySet.add(new AbstractMap.SimpleEntry<>(node.key, node.value));
}
- return Collections.unmodifiableSet(result.entrySet());
+ return entrySet;
}
+ @Override
public Set keySet() {
- return Collections.unmodifiableSet(map.keySet());
+ return Collections.unmodifiableSet(cache.keySet());
}
+ @Override
public Collection values() {
Collection values = new ArrayList<>();
- for (Node node : map.values()) {
+ for (Node node : cache.values()) {
values.add(node.value);
}
return Collections.unmodifiableCollection(values);
}
+ @Override
public boolean equals(Object o) {
- if (o == this) {
- return true;
- }
- if (o instanceof Map) {
- Map, ?> other = (Map, ?>) o;
- if (other.size() != this.size()) {
- return false;
- }
- for (Map.Entry, ?> entry : other.entrySet()) {
- V value = this.get(entry.getKey());
- if (!Objects.equals(value, entry.getValue())) {
- return false;
- }
- }
- return true;
- }
- return false;
+ if (this == o) return true;
+ if (!(o instanceof Map)) return false;
+ Map, ?> other = (Map, ?>) o;
+ return this.entrySet().equals(other.entrySet());
}
+ @Override
public int hashCode() {
int hashCode = 1;
- for (Map.Entry entry : map.entrySet()) {
- hashCode = 31 * hashCode + (entry.getKey() == null ? 0 : entry.getKey().hashCode());
- hashCode = 31 * hashCode + (entry.getValue().value == null ? 0 : entry.getValue().value.hashCode());
+ for (Node node : cache.values()) {
+ hashCode = 31 * hashCode + (node.key == null ? 0 : node.key.hashCode());
+ hashCode = 31 * hashCode + (node.value == null ? 0 : node.value.hashCode());
}
return hashCode;
}
-
+
+ @Override
public String toString() {
- StringBuilder sb = new StringBuilder("{");
- for (Map.Entry entry : map.entrySet()) {
- sb.append(entry.getKey()).append("=").append(entry.getValue().value).append(", ");
+ StringBuilder sb = new StringBuilder();
+ sb.append("{");
+ for (Node node : cache.values()) {
+ sb.append(node.key).append("=").append(node.value).append(", ");
}
if (sb.length() > 1) {
- sb.setLength(sb.length() - 2);
+ sb.setLength(sb.length() - 2); // Remove trailing comma and space
}
sb.append("}");
return sb.toString();
}
- private void addToHead(Node node) {
- Node nextNode = head.next;
- node.next = nextNode;
- node.prev = head;
- head.next = node;
- nextNode.prev = node;
- }
-
- private void removeNode(Node node) {
- Node prevNode = node.prev;
- Node nextNode = node.next;
- prevNode.next = nextNode;
- nextNode.prev = prevNode;
- }
-
- private void moveToHead(Node node) {
- removeNode(node);
- addToHead(node);
- }
-
- private Node removeTailNode() {
- Node oldestNode = tail.prev;
- if (oldestNode == head) {
- return null;
+ // Schedule a delayed cleanup
+ private synchronized void scheduleCleanup() {
+ if (cache.size() > capacity && !cleanupScheduled) {
+ cleanupScheduled = true;
+ executorService.schedule(this::dynamicCleanup, DELAY, TimeUnit.MILLISECONDS);
}
- removeNode(oldestNode);
- return oldestNode;
}
}
\ No newline at end of file
diff --git a/src/test/java/com/cedarsoftware/util/LRUCacheTest.java b/src/test/java/com/cedarsoftware/util/LRUCacheTest.java
index 26112a49..39ba819c 100644
--- a/src/test/java/com/cedarsoftware/util/LRUCacheTest.java
+++ b/src/test/java/com/cedarsoftware/util/LRUCacheTest.java
@@ -63,10 +63,25 @@ void testEvictionPolicy() {
lruCache.get(1);
lruCache.put(4, "D");
- assertNull(lruCache.get(2));
- assertEquals("A", lruCache.get(1));
- }
+ // Wait for the background cleanup thread to perform the eviction
+ long startTime = System.currentTimeMillis();
+ long timeout = 5000; // 5 seconds timeout
+ while (System.currentTimeMillis() - startTime < timeout) {
+ if (!lruCache.containsKey(2) && lruCache.containsKey(1) && lruCache.containsKey(4)) {
+ break;
+ }
+ try {
+ Thread.sleep(100); // Check every 100ms
+ } catch (InterruptedException ignored) {
+ }
+ }
+ // Assert the expected cache state
+ assertNull(lruCache.get(2), "Entry for key 2 should be evicted");
+ assertEquals("A", lruCache.get(1), "Entry for key 1 should still be present");
+ assertEquals("D", lruCache.get(4), "Entry for key 4 should be present");
+ }
+
@Test
void testSize() {
lruCache.put(1, "A");
@@ -223,9 +238,6 @@ void testConcurrency() throws InterruptedException {
service.shutdown();
assertTrue(service.awaitTermination(1, TimeUnit.MINUTES));
-// System.out.println("lruCache = " + lruCache);
-// System.out.println("lruCache = " + lruCache.size());
-// System.out.println("attempts =" + attempts);
}
@Test
@@ -309,8 +321,9 @@ void testToString() {
lruCache.put(2, "B");
lruCache.put(3, "C");
- String expected = "{1=A, 2=B, 3=C}";
- assertEquals(expected, lruCache.toString());
+ assert lruCache.toString().contains("1=A");
+ assert lruCache.toString().contains("2=B");
+ assert lruCache.toString().contains("3=C");
}
@Test
@@ -322,43 +335,44 @@ void testFullCycle() {
lruCache.put(5, "E");
lruCache.put(6, "F");
- String expected = "{4=D, 5=E, 6=F}";
- assertEquals(expected, lruCache.toString());
+ long startTime = System.currentTimeMillis();
+ long timeout = 5000; // 5 seconds timeout
+ while (System.currentTimeMillis() - startTime < timeout) {
+ if (lruCache.size() == 3 &&
+ lruCache.containsKey(4) &&
+ lruCache.containsKey(5) &&
+ lruCache.containsKey(6) &&
+ !lruCache.containsKey(1) &&
+ !lruCache.containsKey(2) &&
+ !lruCache.containsKey(3)) {
+ break;
+ }
+ try {
+ Thread.sleep(100); // Check every 100ms
+ } catch (InterruptedException ignored) {
+ }
+ }
+
+ assertEquals(3, lruCache.size(), "Cache size should be 3 after eviction");
+ assertTrue(lruCache.containsKey(4));
+ assertTrue(lruCache.containsKey(5));
+ assertTrue(lruCache.containsKey(6));
+ assertEquals("D", lruCache.get(4));
+ assertEquals("E", lruCache.get(5));
+ assertEquals("F", lruCache.get(6));
lruCache.remove(6);
lruCache.remove(5);
lruCache.remove(4);
- assert lruCache.size() == 0;
+ assertEquals(0, lruCache.size(), "Cache should be empty after removing all elements");
}
-
+
@Test
void testCacheWhenEmpty() {
// The cache is initially empty, so any get operation should return null
assertNull(lruCache.get(1));
}
- @Test
- void testCacheEvictionWhenCapacityExceeded() {
- // Add elements to the cache until it reaches its capacity
- lruCache.put(1, "A");
- lruCache.put(2, "B");
- lruCache.put(3, "C");
-
- // Access an element to change the LRU order
- lruCache.get(1);
-
- // Add another element to trigger eviction
- lruCache.put(4, "D");
-
- // Check that the least recently used element (2) was evicted
- assertNull(lruCache.get(2));
-
- // Check that the other elements are still in the cache
- assertEquals("A", lruCache.get(1));
- assertEquals("C", lruCache.get(3));
- assertEquals("D", lruCache.get(4));
- }
-
@Test
void testCacheClear() {
// Add elements to the cache
@@ -372,4 +386,30 @@ void testCacheClear() {
assertNull(lruCache.get(1));
assertNull(lruCache.get(2));
}
+
+ @Test
+ void testCacheBlast() {
+ // Jam 10M items to the cache
+ lruCache = new LRUCache<>(1000);
+ for (int i = 0; i < 10000000; i++) {
+ lruCache.put(i, "" + i);
+ }
+
+ // Wait until the cache size stabilizes to 1000
+ int expectedSize = 1000;
+ long startTime = System.currentTimeMillis();
+ long timeout = 10000; // wait up to 10 seconds (will never take this long)
+ while (System.currentTimeMillis() - startTime < timeout) {
+ if (lruCache.size() <= expectedSize) {
+ break;
+ }
+ try {
+ Thread.sleep(100); // Check every 100ms
+ System.out.println("Cache size: " + lruCache.size());
+ } catch (InterruptedException ignored) {
+ }
+ }
+
+ assertEquals(1000, lruCache.size());
+ }
}