diff --git a/src/main/java/com/cedarsoftware/util/LRUCache.java b/src/main/java/com/cedarsoftware/util/LRUCache.java
index c733f083..34734222 100644
--- a/src/main/java/com/cedarsoftware/util/LRUCache.java
+++ b/src/main/java/com/cedarsoftware/util/LRUCache.java
@@ -9,16 +9,16 @@
import com.cedarsoftware.util.cache.ThreadedLRUCacheStrategy;
/**
- * This class provides a thread-safe Least Recently Used (LRU) cache API that will evict the least recently used items,
- * once a threshold is met. It implements the Map interface for convenience.
+ * This class provides a thread-safe Least Recently Used (LRU) cache API that evicts the least recently used items once
+ * a threshold is met. It implements the Map interface for convenience.
*
- * This class provides two implementation strategies: a locking approach and a threaded approach.
+ * This class offers two implementation strategies: a locking approach and a threaded approach.
 *
 * The Locking strategy allows for O(1) access for get(), put(), and remove(). For put(), remove(), and many other
@@ -31,7 +31,7 @@
 * with cleaning up items above the capacity threshold. This means that the cache may temporarily exceed its capacity, but
 * it will soon be trimmed back to the capacity limit by the scheduled thread.
 *
- * LRUCache supports null for both key or value.
+ * LRUCache supports null for both key and value.
*
* Special Thanks: This implementation was inspired by insights and suggestions from Ben Manes.
* @see LockingLRUCacheStrategy
diff --git a/src/main/java/com/cedarsoftware/util/cache/LockingLRUCacheStrategy.java b/src/main/java/com/cedarsoftware/util/cache/LockingLRUCacheStrategy.java
index 90ef2002..a4aca999 100644
--- a/src/main/java/com/cedarsoftware/util/cache/LockingLRUCacheStrategy.java
+++ b/src/main/java/com/cedarsoftware/util/cache/LockingLRUCacheStrategy.java
@@ -9,7 +9,7 @@
import java.util.concurrent.locks.ReentrantLock;
/**
- * This class provides a thread-safe Least Recently Used (LRU) cache API that will evict the least recently used items,
+ * This class provides a thread-safe Least Recently Used (LRU) cache API that evicts the least recently used items
 * once a threshold is met. It implements the Map interface for convenience.
*
 * The Locking strategy allows for O(1) access for get(), put(), and remove(). For put(), remove(), and many other
@@ -17,7 +17,7 @@
 * This 'try-lock' approach ensures that the get() API is never blocking, but it also means that the LRU order is not
 * perfectly maintained under heavy load.
 *
- * LRUCache supports null for both key or value.
+ * LRUCache supports null for both key and value.
* @author John DeRegnaucourt (jdereg@gmail.com)
*
* Copyright (c) Cedar Software LLC
diff --git a/src/main/java/com/cedarsoftware/util/cache/ThreadedLRUCacheStrategy.java b/src/main/java/com/cedarsoftware/util/cache/ThreadedLRUCacheStrategy.java
index 4d4ef1a1..30b83273 100644
--- a/src/main/java/com/cedarsoftware/util/cache/ThreadedLRUCacheStrategy.java
+++ b/src/main/java/com/cedarsoftware/util/cache/ThreadedLRUCacheStrategy.java
@@ -16,7 +16,7 @@
import java.util.concurrent.atomic.AtomicBoolean;
/**
- * This class provides a thread-safe Least Recently Used (LRU) cache API that will evict the least recently used items,
+ * This class provides a thread-safe Least Recently Used (LRU) cache API that evicts the least recently used items
 * once a threshold is met. It implements the Map interface for convenience.
*
* The Threaded strategy allows for O(1) access for get(), put(), and remove() without blocking. It uses a ConcurrentHashMap
@@ -24,7 +24,7 @@
* with cleaning up items above the capacity threshold. This means that the cache may temporarily exceed its capacity, but
* it will soon be trimmed back to the capacity limit by the scheduled thread.
*
- * LRUCache supports null for both key or value.
+ * LRUCache supports null for both key and value.
*
* @author John DeRegnaucourt (jdereg@gmail.com)
*