Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

OAK-11350 : removed usage of Guava's Maps.filterKeys #1956

Merged
merged 2 commits into from
Jan 8, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,6 @@
import org.apache.jackrabbit.guava.common.cache.Cache;
import org.apache.jackrabbit.guava.common.cache.CacheBuilder;
import org.apache.jackrabbit.guava.common.collect.AbstractIterator;
import org.apache.jackrabbit.guava.common.collect.Maps;

import static org.apache.jackrabbit.oak.commons.conditions.Validate.checkArgument;
import static org.apache.jackrabbit.guava.common.collect.Iterables.filter;
Expand Down Expand Up @@ -251,7 +250,7 @@ public void init() throws DataStoreException {
LOG.error("Error ", e);
Map<String, Object> filteredMap = new HashMap<>();
if (properties != null) {
filteredMap = Maps.filterKeys(Utils.asMap(properties),
filteredMap = CollectionUtils.filterKeys(Utils.asMap(properties),
input -> !input.equals(S3Constants.ACCESS_KEY) &&!input.equals(S3Constants.SECRET_KEY));
}
throw new DataStoreException("Could not initialize S3 from " + filteredMap, e);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
Expand Down Expand Up @@ -416,6 +417,27 @@ public static Map<String, String> fromProperties(final @NotNull Properties prope
e -> String.valueOf(e.getValue())));
}

/**
 * Create a new {@link Map} containing only those entries of the given map
 * whose keys satisfy the supplied predicate.
 * <p>
 * The result is backed by a {@link LinkedHashMap}, so the iteration order of
 * the source map is preserved. Entries with {@code null} keys are supported,
 * provided the predicate itself accepts a {@code null} argument.
 *
 * @param <K> the type of keys in the map
 * @param <V> the type of values in the map
 * @param map the map to filter, must not be null
 * @param predicate the predicate to apply to the keys, must not be null
 * @return a new map containing only the entries whose keys match the predicate
 * @throws NullPointerException if the map or predicate is null
 */
@NotNull
public static <K,V> Map<K, V> filterKeys(final @NotNull Map<K, V> map, final @NotNull Predicate<? super K> predicate) {
    Objects.requireNonNull(map);
    Objects.requireNonNull(predicate);
    // LinkedHashMap keeps the entry order of the source map
    final Map<K, V> filtered = new LinkedHashMap<>();
    for (final Map.Entry<K, V> entry : map.entrySet()) {
        if (predicate.test(entry.getKey())) {
            filtered.put(entry.getKey(), entry.getValue());
        }
    }
    return filtered;
}

/**
* Convert an {@code Iterator} to an {@code Iterable}.
* <p>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
Expand All @@ -33,6 +34,7 @@
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

Expand Down Expand Up @@ -558,6 +560,80 @@ public void testFromPropertiesNull() {
Assert.assertThrows(NullPointerException.class, () -> CollectionUtils.fromProperties(null));
}

@Test
public void testFilterKeys() {
    // keys starting with "t" should survive, everything else is dropped
    final Map<String, Integer> numbers = new HashMap<>();
    numbers.put("one", 1);
    numbers.put("two", 2);
    numbers.put("three", 3);

    final Map<String, Integer> filtered = CollectionUtils.filterKeys(numbers, key -> key.startsWith("t"));

    Assert.assertEquals(2, filtered.size());
    Assert.assertTrue(filtered.containsKey("two"));
    Assert.assertTrue(filtered.containsKey("three"));
    Assert.assertFalse(filtered.containsKey("one"));
}

@Test
public void testFilterKeysEmptyMap() {
    // filtering an empty map must yield an empty map regardless of the predicate
    final Map<String, Integer> empty = new HashMap<>();

    final Map<String, Integer> filtered = CollectionUtils.filterKeys(empty, key -> key.startsWith("t"));

    Assert.assertTrue(filtered.isEmpty());
}

@Test
public void testFilterNullKeys() {
    // a null key must be matchable by a null-tolerant predicate
    final Map<String, Integer> numbers = new HashMap<>();
    numbers.put("one", 1);
    numbers.put("two", 2);
    numbers.put("three", 3);
    numbers.put(null, 4);

    final Map<String, Integer> onlyNullKey = CollectionUtils.filterKeys(numbers, Objects::isNull);

    Assert.assertEquals(1, onlyNullKey.size());
}

@Test
public void testFilterNonNullKeys() {
    // the null-keyed entry must be dropped, all others retained
    final Map<String, Integer> numbers = new HashMap<>();
    numbers.put("one", 1);
    numbers.put("two", 2);
    numbers.put("three", 3);
    numbers.put(null, 4);

    final Map<String, Integer> withoutNullKey = CollectionUtils.filterKeys(numbers, Objects::nonNull);

    Assert.assertEquals(3, withoutNullKey.size());
}

@Test
public void testFilterKeysNullMap() {
    // a null map argument must be rejected up front
    final Predicate<String> startsWithT = key -> key.startsWith("t");

    Assert.assertThrows(NullPointerException.class,
            () -> CollectionUtils.filterKeys(null, startsWithT));
}

@Test
public void testFilterKeysNullPredicate() {
    // a null predicate argument must be rejected up front
    final Map<String, Integer> numbers = new HashMap<>();
    numbers.put("one", 1);
    numbers.put("two", 2);
    numbers.put("three", 3);

    Assert.assertThrows(NullPointerException.class,
            () -> CollectionUtils.filterKeys(numbers, null));
}

@Test
public void ensureCapacity() {
int capacity = CollectionUtils.ensureCapacity(8);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import org.apache.jackrabbit.guava.common.collect.Maps;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.commons.PerfLogger;
import org.apache.jackrabbit.oak.commons.collections.CollectionUtils;
import org.apache.jackrabbit.oak.plugins.index.AsyncIndexInfoService;
import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndexFactory;
import org.apache.jackrabbit.oak.plugins.index.lucene.reader.DefaultIndexReaderFactory;
Expand All @@ -49,8 +50,6 @@
import org.slf4j.LoggerFactory;

import static java.util.Objects.requireNonNull;


import static java.util.Collections.emptyMap;
import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.TYPE_LUCENE;
import static org.apache.jackrabbit.oak.plugins.index.lucene.util.LuceneIndexHelper.isLuceneIndexNode;
Expand Down Expand Up @@ -186,7 +185,7 @@ public void leave(NodeState before, NodeState after) {

if (!updates.isEmpty()) {
Map<String, LuceneIndexNodeManager> builder = new HashMap<>();
builder.putAll(Maps.filterKeys(original, x -> !updates.keySet().contains(x)));
builder.putAll(CollectionUtils.filterKeys(original, x -> !updates.containsKey(x)));
builder.putAll(Maps.filterValues(updates, x -> x != null));
indices = Collections.unmodifiableMap(builder);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import org.apache.jackrabbit.guava.common.collect.Iterables;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.commons.PerfLogger;
import org.apache.jackrabbit.oak.commons.collections.CollectionUtils;
import org.apache.jackrabbit.oak.plugins.index.AsyncIndexInfoService;
import org.apache.jackrabbit.oak.plugins.index.search.BadIndexTracker;
import org.apache.jackrabbit.oak.plugins.index.search.IndexDefinition;
Expand All @@ -45,7 +46,6 @@

import static java.util.Objects.requireNonNull;

import static org.apache.jackrabbit.guava.common.collect.Maps.filterKeys;
import static org.apache.jackrabbit.guava.common.collect.Maps.filterValues;
import static java.util.Collections.emptyMap;
import static org.apache.jackrabbit.oak.plugins.index.search.IndexDefinition.INDEX_DEFINITION_NODE;
Expand Down Expand Up @@ -148,7 +148,7 @@ public void leave(NodeState before, NodeState after) {

if (!updates.isEmpty()) {
Map<String, I> builder = new HashMap<>();
builder.putAll(filterKeys(original, x -> !updates.keySet().contains(x)));
builder.putAll(CollectionUtils.filterKeys(original, x -> !updates.containsKey(x)));
builder.putAll(filterValues(updates, x -> x != null));
indices = Collections.unmodifiableMap(builder);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@

import static org.apache.jackrabbit.guava.common.collect.Iterables.filter;
import static org.apache.jackrabbit.guava.common.collect.Iterables.transform;
import static org.apache.jackrabbit.guava.common.collect.Maps.filterKeys;
import static java.util.Collections.singletonList;
import static org.apache.jackrabbit.oak.plugins.document.util.Utils.asISO8601;
import static org.apache.jackrabbit.oak.plugins.document.Collection.JOURNAL;
Expand All @@ -46,6 +45,7 @@

import org.apache.jackrabbit.guava.common.collect.Iterables;
import org.apache.jackrabbit.oak.commons.TimeDurationFormatter;
import org.apache.jackrabbit.oak.commons.collections.CollectionUtils;
import org.apache.jackrabbit.oak.commons.properties.SystemPropertySupplier;
import org.apache.jackrabbit.oak.plugins.document.bundlor.DocumentBundlor;
import org.apache.jackrabbit.oak.plugins.document.cache.CacheInvalidationStats;
Expand Down Expand Up @@ -726,7 +726,7 @@ private Revision determineLastModification(NodeDocument doc, int clusterId) {
for (String property : doc.keySet().stream().filter(PROPERTY_OR_DELETED).collect(Collectors.toSet())) {
Map<Revision, String> valueMap = doc.getLocalMap(property);
// collect committed changes of this cluster node
for (Map.Entry<Revision, String> entry : filterKeys(valueMap, cp::test).entrySet()) {
for (Map.Entry<Revision, String> entry : CollectionUtils.filterKeys(valueMap, cp).entrySet()) {
Revision rev = entry.getKey();
String cv = revisionContext.getCommitValue(rev, doc);
if (isCommitted(cv)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,6 @@
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.stream.Collectors.toList;
import static org.apache.jackrabbit.guava.common.collect.Iterables.filter;
import static org.apache.jackrabbit.guava.common.collect.Maps.filterKeys;
import static com.mongodb.client.model.Projections.include;
import static java.lang.Integer.MAX_VALUE;
import static java.util.Collections.emptyList;
Expand Down Expand Up @@ -1487,7 +1486,7 @@ private <T extends Document> Map<UpdateOp, T> bulkUpdate(Collection<T> collectio

if (collection == Collection.NODES) {
List<NodeDocument> docsToCache = new ArrayList<>();
for (UpdateOp op : filterKeys(bulkOperations, x -> bulkResult.upserts.contains(x)).values()) {
for (UpdateOp op : CollectionUtils.filterKeys(bulkOperations, bulkResult.upserts::contains).values()) {
NodeDocument doc = Collection.NODES.newDocument(this);
UpdateUtils.applyChanges(doc, op);
docsToCache.add(doc);
Expand Down
Loading