Fix some deprecated api usages (#779)
aprudhomme authored Nov 7, 2024
1 parent ce5ced1 commit 1dded2a
Showing 48 changed files with 688 additions and 725 deletions.
2 changes: 1 addition & 1 deletion clientlib/src/main/proto/yelp/nrtsearch/search.proto
@@ -868,7 +868,7 @@ message Facet {
string dim = 1; //Dimension (field)
repeated string paths = 2; //Prefix path to facet 'under'
repeated NumericRangeType numericRange = 3; //Custom numeric ranges. Field must be indexed with facet=numericRange.
- bool useOrdsCache = 4; // True if the ordinals cache should be used
+ bool useOrdsCache = 4 [deprecated = true]; // True if the ordinals cache should be used
repeated string labels = 5; // Specific facet labels to retrieve
int32 topN = 6; //How many top facets to return
Script script = 7; //FacetScript definition to use in place of index facet
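
Marking the field `[deprecated = true]` keeps it on the wire for existing clients while signaling that it no longer has any effect. With the standard protobuf-java codegen, the generated accessors pick up `@Deprecated`. A minimal illustrative sketch, assuming the generated `Facet` class lives in `com.yelp.nrtsearch.server.grpc` like the other messages referenced in this diff (the field values are made up):

```java
import com.yelp.nrtsearch.server.grpc.Facet;

public class DeprecatedFacetFieldExample {
  public static void main(String[] args) {
    // The field still exists on the wire, so old requests keep parsing, but the
    // generated setter is expected to be @Deprecated and the server ignores the value.
    Facet facet =
        Facet.newBuilder()
            .setDim("category")
            .setTopN(10)
            .setUseOrdsCache(true) // compiles, but should now produce a deprecation warning
            .build();
    System.out.println(facet.getDim());
  }
}
```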
946 changes: 474 additions & 472 deletions grpc-gateway/search.pb.go

Large diffs are not rendered by default.

@@ -920,10 +920,10 @@ public SortedStrings(SortedSetDocValues docValues) {
public void setDocId(int docID) throws IOException {
values.clear();
if (docValues.advanceExact(docID)) {
- long ord = docValues.nextOrd();
- while (ord != SortedSetDocValues.NO_MORE_ORDS) {
+ int count = docValues.docValueCount();
+ for (int i = 0; i < count; ++i) {
+ long ord = docValues.nextOrd();
values.add(docValues.lookupOrd(ord).utf8ToString());
- ord = docValues.nextOrd();
}
}
}
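
For context, the hunk above moves from the `NO_MORE_ORDS` sentinel loop (deprecated and slated for removal in newer Lucene) to the `docValueCount()` contract, where each matching document exposes exactly that many ordinals. A self-contained sketch of the two iteration styles; this is not the project's actual class, and the old form only compiles against Lucene versions that still define the sentinel:

```java
import java.io.IOException;
import org.apache.lucene.index.SortedSetDocValues;

public class OrdIterationExample {
  /** Older style: iterate until the NO_MORE_ORDS sentinel (deprecated in newer Lucene). */
  static void printOrdsOld(SortedSetDocValues docValues, int docID) throws IOException {
    if (docValues.advanceExact(docID)) {
      long ord = docValues.nextOrd();
      while (ord != SortedSetDocValues.NO_MORE_ORDS) {
        System.out.println(docValues.lookupOrd(ord).utf8ToString());
        ord = docValues.nextOrd();
      }
    }
  }

  /** Newer style: docValueCount() says how many nextOrd() calls are valid for this doc. */
  static void printOrdsNew(SortedSetDocValues docValues, int docID) throws IOException {
    if (docValues.advanceExact(docID)) {
      int count = docValues.docValueCount();
      for (int i = 0; i < count; ++i) {
        long ord = docValues.nextOrd();
        System.out.println(docValues.lookupOrd(ord).utf8ToString());
      }
    }
  }
}
```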
@@ -56,7 +56,6 @@
import org.apache.lucene.facet.sortedset.SortedSetDocValuesFacetCounts;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.SearcherTaxonomyManager;
- import org.apache.lucene.facet.taxonomy.TaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
@@ -452,7 +451,6 @@ private static com.yelp.nrtsearch.server.grpc.FacetResult getFieldFacetResult(
}

FacetsCollector c = dsDimMap.get(fieldDef.getName());
- boolean useCachedOrds = facet.getUseOrdsCache();

Facets luceneFacets;
if (c != null) {
@@ -461,21 +459,12 @@ private static com.yelp.nrtsearch.server.grpc.FacetResult getFieldFacetResult(
// drill-sideways collector:
String indexFieldName =
indexState.getFacetsConfig().getDimConfig(fieldDef.getName()).indexFieldName;
- if (useCachedOrds) {
- luceneFacets =
- new TaxonomyFacetCounts(
- shardState.getOrdsCache(indexFieldName),
- searcherAndTaxonomyManager.taxonomyReader,
- indexState.getFacetsConfig(),
- c);
- } else {
- luceneFacets =
- new FastTaxonomyFacetCounts(
- indexFieldName,
- searcherAndTaxonomyManager.taxonomyReader,
- indexState.getFacetsConfig(),
- c);
- }
+ luceneFacets =
+ new FastTaxonomyFacetCounts(
+ indexFieldName,
+ searcherAndTaxonomyManager.taxonomyReader,
+ indexState.getFacetsConfig(),
+ c);
} else {

// nocommit test both normal & ssdv facets in same index
@@ -486,21 +475,12 @@ private static com.yelp.nrtsearch.server.grpc.FacetResult getFieldFacetResult(
indexState.getFacetsConfig().getDimConfig(fieldDef.getName()).indexFieldName;
luceneFacets = indexFieldNameToFacets.get(indexFieldName);
if (luceneFacets == null) {
- if (useCachedOrds) {
- luceneFacets =
- new TaxonomyFacetCounts(
- shardState.getOrdsCache(indexFieldName),
- searcherAndTaxonomyManager.taxonomyReader,
- indexState.getFacetsConfig(),
- drillDowns);
- } else {
- luceneFacets =
- new FastTaxonomyFacetCounts(
- indexFieldName,
- searcherAndTaxonomyManager.taxonomyReader,
- indexState.getFacetsConfig(),
- drillDowns);
- }
+ luceneFacets =
+ new FastTaxonomyFacetCounts(
+ indexFieldName,
+ searcherAndTaxonomyManager.taxonomyReader,
+ indexState.getFacetsConfig(),
+ drillDowns);
indexFieldNameToFacets.put(indexFieldName, luceneFacets);
}
}
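
Both hunks above drop the `useOrdsCache` branch because `TaxonomyFacetCounts` and the cached-ordinals readers are no longer available in newer Lucene; counting now always goes through `FastTaxonomyFacetCounts`, which reads ordinals directly from doc values. A hedged, standalone sketch of that counting path (the method and parameter names are illustrative, not nrtsearch's API):

```java
import java.io.IOException;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;

public class TaxonomyFacetCountExample {
  /** Illustrative only: count facets for one dimension without the removed cached-ordinals path. */
  static FacetResult countDim(
      String indexFieldName,
      TaxonomyReader taxoReader,
      FacetsConfig facetsConfig,
      FacetsCollector collector,
      String dim,
      int topN)
      throws IOException {
    Facets facets =
        new FastTaxonomyFacetCounts(indexFieldName, taxoReader, facetsConfig, collector);
    return facets.getTopChildren(topN, dim);
  }
}
```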
@@ -395,7 +395,7 @@ public long runIndexingJob() throws Exception {
logger.debug(
String.format(
"running indexing job on threadId: %s",
- Thread.currentThread().getName() + Thread.currentThread().getId()));
+ Thread.currentThread().getName() + Thread.currentThread().threadId()));
Queue<Document> documents = new LinkedBlockingDeque<>();
IndexState indexState;
ShardState shardState;
@@ -426,7 +426,7 @@ public long runIndexingJob() throws Exception {
logger.warn(
String.format(
"ThreadId: %s, IndexWriter.addDocuments failed",
- Thread.currentThread().getName() + Thread.currentThread().getId()));
+ Thread.currentThread().getName() + Thread.currentThread().threadId()));
throw new IOException(e);
}
} else {
@@ -449,13 +449,13 @@ public long runIndexingJob() throws Exception {
logger.warn(
String.format(
"ThreadId: %s, IndexWriter.addDocuments failed",
- Thread.currentThread().getName() + Thread.currentThread().getId()));
+ Thread.currentThread().getName() + Thread.currentThread().threadId()));
throw new IOException(e);
}
logger.debug(
String.format(
"indexing job on threadId: %s done with SequenceId: %s",
- Thread.currentThread().getName() + Thread.currentThread().getId(),
+ Thread.currentThread().getName() + Thread.currentThread().threadId(),
shardState.writer.getMaxCompletedSequenceNumber()));
return shardState.writer.getMaxCompletedSequenceNumber();
}
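
`Thread.getId()` is deprecated as of Java 19 in favor of `Thread.threadId()`, which returns the same identifier for platform threads, so the log-tag concatenation in these hunks is otherwise unchanged. A minimal sketch of the replacement (requires JDK 19 or newer):

```java
public class ThreadIdExample {
  public static void main(String[] args) {
    // Deprecated since Java 19:
    // long id = Thread.currentThread().getId();
    long id = Thread.currentThread().threadId();
    String tag = Thread.currentThread().getName() + id;
    System.out.println("running indexing job on threadId: " + tag);
  }
}
```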
@@ -52,7 +52,7 @@ private DeleteAllDocumentsResponse handle(IndexState indexState)
} catch (IOException e) {
logger.warn(
"ThreadId: {}, writer.deleteAll failed",
- Thread.currentThread().getName() + Thread.currentThread().getId());
+ Thread.currentThread().getName() + Thread.currentThread().threadId());
throw new DeleteAllDocumentsHandlerException(e);
}
return DeleteAllDocumentsResponse.newBuilder().setGenId(String.valueOf(gen)).build();
@@ -58,7 +58,7 @@ private AddDocumentResponse handle(
} catch (IOException e) {
logger.warn(
"ThreadId: {}, writer.deleteDocuments failed",
- Thread.currentThread().getName() + Thread.currentThread().getId());
+ Thread.currentThread().getName() + Thread.currentThread().threadId());
throw new DeleteByQueryHandlerException(e);
}
long genId = shardState.writer.getMaxCompletedSequenceNumber();
@@ -68,7 +68,7 @@ private AddDocumentResponse handleInternal(
} catch (IOException e) {
logger.warn(
"ThreadId: {}, writer.deleteDocuments failed",
- Thread.currentThread().getName() + Thread.currentThread().getId());
+ Thread.currentThread().getName() + Thread.currentThread().threadId());
throw new DeleteDocumentsHandlerException(e);
}
long genId = shardState.writer.getMaxCompletedSequenceNumber();
@@ -46,7 +46,7 @@ private DeleteIndexResponse handle(IndexState indexState) throws DeleteIndexHand
} catch (IOException e) {
logger.warn(
"ThreadId: {}, deleteIndex failed",
- Thread.currentThread().getName() + Thread.currentThread().getId());
+ Thread.currentThread().getName() + Thread.currentThread().threadId());
throw new DeleteIndexHandlerException(e);
}
return DeleteIndexResponse.newBuilder().setOk("ok").build();
80 changes: 63 additions & 17 deletions src/main/java/com/yelp/nrtsearch/server/handler/SearchHandler.java
@@ -60,13 +60,16 @@
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.*;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.DrillDownQuery;
import org.apache.lucene.facet.DrillSideways;
import org.apache.lucene.facet.taxonomy.SearcherTaxonomyManager;
@@ -779,12 +782,15 @@ private List<Map<String, CompositeFieldValue>> fillFields(
for (int hitIndex = 0; hitIndex < hitBuilders.size(); ++hitIndex) {
values.add(new HashMap<>());
}

StoredFields storedFields = s.storedFields();
for (String field : fields) {
for (int hitIndex = 0; hitIndex < hitBuilders.size(); ++hitIndex) {
var hitResponse = hitBuilders.get(hitIndex);
LeafReaderContext leaf = hitIdToleaves.get(hitIndex);
CompositeFieldValue v =
- getFieldForHit(s, hitResponse, leaf, field, searchContext.getRetrieveFields());
+ getFieldForHit(
+ s, hitResponse, leaf, field, searchContext.getRetrieveFields(), storedFields);
values.get(hitIndex).put(field, v);
}
}
@@ -801,7 +807,8 @@ private CompositeFieldValue getFieldForHit(
Hit.Builder hit,
LeafReaderContext leaf,
String field,
- Map<String, FieldDef> dynamicFields)
+ Map<String, FieldDef> dynamicFields,
+ StoredFields storedFields)
throws IOException {
assert field != null;
CompositeFieldValue.Builder compositeFieldValue = CompositeFieldValue.newBuilder();
Expand Down Expand Up @@ -854,7 +861,8 @@ public boolean advanceExact(int doc) throws IOException {

// retrieve stored fields
case IndexableFieldDef<?> indexableFieldDef when indexableFieldDef.isStored() -> {
- IndexableField[] values = s.doc(hit.getLuceneDocId()).getFields(field);
+ IndexableField[] values =
+ storedFields.document(hit.getLuceneDocId(), Set.of(field)).getFields(field);
for (IndexableField fieldValue : values) {
compositeFieldValue.addFieldValue(
indexableFieldDef.getStoredFieldValue(fieldValue.storedValue()));
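
`IndexSearcher.doc(int)` is deprecated in recent Lucene releases; the replacement is to obtain a `StoredFields` instance from the searcher once and call `document(docId, fieldsToLoad)`, which is why the handler now threads a `storedFields` reference through these methods. A standalone sketch of the pattern (the helper name and parameters are illustrative):

```java
import java.io.IOException;
import java.util.Set;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StoredFields;
import org.apache.lucene.search.IndexSearcher;

public class StoredFieldsExample {
  /** Illustrative helper: load one stored field for a hit using the newer API. */
  static IndexableField[] loadField(IndexSearcher searcher, int luceneDocId, String field)
      throws IOException {
    // Deprecated style: searcher.doc(luceneDocId).getFields(field)
    StoredFields storedFields = searcher.storedFields(); // fetch once, reuse across hits
    Document doc = storedFields.document(luceneDocId, Set.of(field));
    return doc.getFields(field);
  }
}
```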
@@ -881,6 +889,31 @@ public static class FillDocsTask implements Runnable {
private final List<Hit.Builder> docChunk;
private final Query explainQuery;

record NameAndFieldDef(String name, IndexableFieldDef<?> fieldDef) {}

record StoredFieldFetchContext(
StoredFields storedFields,
Set<String> fieldNames,
List<NameAndFieldDef> nameAndFieldDefs) {}

static StoredFieldFetchContext getStoredFieldFetchContext(FieldFetchContext fieldFetchContext)
throws IOException {
List<NameAndFieldDef> storedFieldEntries = new ArrayList<>();
Set<String> storedFieldNames = new HashSet<>();
for (Map.Entry<String, FieldDef> fieldDefEntry :
fieldFetchContext.getRetrieveFields().entrySet()) {
if (fieldDefEntry.getValue() instanceof IndexableFieldDef<?> indexableFieldDef
&& indexableFieldDef.isStored()) {
storedFieldEntries.add(new NameAndFieldDef(fieldDefEntry.getKey(), indexableFieldDef));
storedFieldNames.add(fieldDefEntry.getKey());
}
}
return new StoredFieldFetchContext(
fieldFetchContext.getSearcherAndTaxonomy().searcher.storedFields(),
storedFieldNames,
storedFieldEntries);
}

/**
* Constructor.
*
@@ -902,6 +935,13 @@ public FillDocsTask(
public void run() {
List<LeafReaderContext> leaves =
fieldFetchContext.getSearcherAndTaxonomy().searcher.getIndexReader().leaves();
StoredFieldFetchContext storedFieldFetchContext;
try {
storedFieldFetchContext = getStoredFieldFetchContext(fieldFetchContext);
} catch (IOException e) {
throw new RuntimeException(e);
}

int hitIndex = 0;
// process documents, grouped by lucene segment
while (hitIndex < docChunk.size()) {
@@ -911,7 +951,8 @@ public void run() {
// get all hits in the same segment and process them together for better resource reuse
List<Hit.Builder> sliceHits = getSliceHits(docChunk, hitIndex, sliceSegment);
try {
fetchSlice(fieldFetchContext, explainQuery, sliceHits, sliceSegment);
fetchSlice(
fieldFetchContext, explainQuery, sliceHits, sliceSegment, storedFieldFetchContext);
} catch (IOException e) {
throw new RuntimeException("Error fetching field data", e);
}
Expand Down Expand Up @@ -949,7 +990,8 @@ private static void fetchSlice(
FieldFetchContext context,
Query explainQuery,
List<SearchResponse.Hit.Builder> sliceHits,
LeafReaderContext sliceSegment)
LeafReaderContext sliceSegment,
StoredFieldFetchContext storedFieldFetchContext)
throws IOException {
for (Map.Entry<String, FieldDef> fieldDefEntry : context.getRetrieveFields().entrySet()) {
switch (fieldDefEntry.getValue()) {
@@ -962,9 +1004,7 @@ private static void fetchSlice(
case IndexableFieldDef<?> indexableFieldDef -> {
if (indexableFieldDef.hasDocValues()) {
fetchFromDocVales(sliceHits, sliceSegment, fieldDefEntry.getKey(), indexableFieldDef);
- } else if (indexableFieldDef.isStored()) {
- fetchFromStored(context, sliceHits, fieldDefEntry.getKey(), indexableFieldDef);
- } else {
+ } else if (!indexableFieldDef.isStored()) {
throw new IllegalStateException(
"No valid method to retrieve indexable field: " + fieldDefEntry.getKey());
}
@@ -975,6 +1015,16 @@ private static void fetchSlice(
}
}

// fetch stored fields by document
if (!storedFieldFetchContext.fieldNames.isEmpty()) {
for (Hit.Builder hit : sliceHits) {
Document document =
storedFieldFetchContext.storedFields.document(
hit.getLuceneDocId(), storedFieldFetchContext.fieldNames);
fetchFromStored(hit, storedFieldFetchContext.nameAndFieldDefs, document);
}
}

// execute any per hit fetch tasks
Query resolvedExplainQuery = null;
if (context.isExplain()) {
@@ -1066,19 +1116,15 @@ private static void fetchFromDocVales(

/** Fetch field value stored in the index */
private static void fetchFromStored(
- FieldFetchContext context,
- List<SearchResponse.Hit.Builder> sliceHits,
- String name,
- IndexableFieldDef<?> indexableFieldDef)
- throws IOException {
- for (SearchResponse.Hit.Builder hit : sliceHits) {
+ Hit.Builder hit, List<NameAndFieldDef> nameAndFieldDefs, Document document) {
+ for (NameAndFieldDef nameAndFieldDef : nameAndFieldDefs) {
+ String name = nameAndFieldDef.name();
+ IndexableField[] values = document.getFields(name);
SearchResponse.Hit.CompositeFieldValue.Builder compositeFieldValue =
SearchResponse.Hit.CompositeFieldValue.newBuilder();
- IndexableField[] values =
- context.getSearcherAndTaxonomy().searcher.doc(hit.getLuceneDocId()).getFields(name);
for (IndexableField fieldValue : values) {
compositeFieldValue.addFieldValue(
- indexableFieldDef.getStoredFieldValue(fieldValue.storedValue()));
+ nameAndFieldDef.fieldDef().getStoredFieldValue(fieldValue.storedValue()));
}
hit.putFields(name, compositeFieldValue.build());
}
22 changes: 2 additions & 20 deletions src/main/java/com/yelp/nrtsearch/server/index/ShardState.java
@@ -48,9 +48,6 @@
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.sortedset.DefaultSortedSetDocValuesReaderState;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState;
- import org.apache.lucene.facet.taxonomy.CachedOrdinalsReader;
- import org.apache.lucene.facet.taxonomy.DocValuesOrdinalsReader;
- import org.apache.lucene.facet.taxonomy.OrdinalsReader;
import org.apache.lucene.facet.taxonomy.SearcherTaxonomyManager;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.DirectoryReader;
@@ -127,12 +124,6 @@ public class ShardState implements Closeable {
/** Maps snapshot gen -&gt; version. */
public final Map<Long, Long> snapshotGenToVersion = new ConcurrentHashMap<>();

- /**
- * Holds cached ordinals; doesn't use any RAM unless it's actually used when a caller sets
- * useOrdsCache=true.
- */
- private final Map<String, OrdinalsReader> ordsCache = new HashMap<>();

/**
* Enables lookup of previously used searchers, so follow-on actions (next page, drill
* down/sideways/up, etc.) use the same searcher as the original search, as long as that searcher
@@ -836,16 +827,6 @@ public IndexSearcher newSearcher(IndexReader r, IndexReader previousReader) {
}
};

- /** Returns cached ordinals for the specified index field name. */
- public synchronized OrdinalsReader getOrdsCache(String indexFieldName) {
- OrdinalsReader ords = ordsCache.get(indexFieldName);
- if (ords == null) {
- ords = new CachedOrdinalsReader(new DocValuesOrdinalsReader(indexFieldName));
- ordsCache.put(indexFieldName, ords);
- }
- return ords;
- }

public SortedSetDocValuesReaderState getSSDVState(
IndexState indexState, SearcherTaxonomyManager.SearcherAndTaxonomy s, FieldDef fd)
throws IOException {
Expand Down Expand Up @@ -884,7 +865,8 @@ public SortedSetDocValuesReaderState getSSDVStateForReader(
}
if (ssdvState == null) {
ssdvState =
- new DefaultSortedSetDocValuesReaderState(reader, dimConfig.indexFieldName) {
+ new DefaultSortedSetDocValuesReaderState(
+ reader, dimConfig.indexFieldName, indexState.getFacetsConfig()) {
@Override
public SortedSetDocValues getDocValues() throws IOException {
SortedSetDocValues values = super.getDocValues();
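
Recent Lucene releases deprecate the two-argument `DefaultSortedSetDocValuesReaderState` constructor in favor of one that also takes the `FacetsConfig`, so the reader state knows the per-dimension configuration when it is built. A minimal sketch of the newer form (the helper name is illustrative):

```java
import java.io.IOException;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.sortedset.DefaultSortedSetDocValuesReaderState;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState;
import org.apache.lucene.index.IndexReader;

public class SsdvReaderStateExample {
  /** Illustrative only: build SSDV facet reader state with the constructor that takes FacetsConfig. */
  static SortedSetDocValuesReaderState newState(
      IndexReader reader, String indexFieldName, FacetsConfig facetsConfig) throws IOException {
    // Older form, deprecated in newer Lucene:
    // new DefaultSortedSetDocValuesReaderState(reader, indexFieldName);
    return new DefaultSortedSetDocValuesReaderState(reader, indexFieldName, facetsConfig);
  }
}
```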