Skip to content

Commit

Permalink
Remove docIdsToLoad method from SearchContext (elastic#100952)
Browse files Browse the repository at this point in the history
We currently set docIdsToLoad on SearchContext, and then pass the context to
FetchPhase.execute(), which reads the docIdsToLoad back out again. This would
be considerably simpler if it were just a parameter on execute().
  • Loading branch information
romseygeek authored Nov 16, 2023
1 parent fc08145 commit 94e510e
Show file tree
Hide file tree
Showing 10 changed files with 11 additions and 74 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,6 @@ final class DefaultSearchContext extends SearchContext {
private Query query;
private ParsedQuery postFilter;
private Query aliasFilter;
private int[] docIdsToLoad;
private SearchContextAggregations aggregations;
private SearchHighlightContext highlight;
private SuggestionSearchContext suggest;
Expand Down Expand Up @@ -727,17 +726,6 @@ public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) {
this.seqAndPrimaryTerm = seqNoAndPrimaryTerm;
}

@Override
public int[] docIdsToLoad() {
return docIdsToLoad;
}

@Override
public SearchContext docIdsToLoad(int[] docIdsToLoad) {
this.docIdsToLoad = docIdsToLoad;
return this;
}

@Override
public DfsSearchResult dfsResult() {
return dfsResult;
Expand Down
10 changes: 4 additions & 6 deletions server/src/main/java/org/elasticsearch/search/SearchService.java
Original file line number Diff line number Diff line change
Expand Up @@ -707,8 +707,7 @@ private QueryFetchSearchResult executeFetchPhase(ReaderContext reader, SearchCon
Releasable scope = tracer.withScope(SpanId.forTask(context.getTask()));
SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context, true, afterQueryTime)
) {
shortcutDocIdsToLoad(context);
fetchPhase.execute(context);
fetchPhase.execute(context, shortcutDocIdsToLoad(context));
if (reader.singleSession()) {
freeReaderContext(reader.id());
}
Expand Down Expand Up @@ -857,11 +856,10 @@ public void executeFetchPhase(ShardFetchRequest request, SearchShardTask task, A
}
searchContext.assignRescoreDocIds(readerContext.getRescoreDocIds(request.getRescoreDocIds()));
searchContext.searcher().setAggregatedDfs(readerContext.getAggregatedDfs(request.getAggregatedDfs()));
searchContext.docIdsToLoad(request.docIds());
try (
SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(searchContext, true, System.nanoTime())
) {
fetchPhase.execute(searchContext);
fetchPhase.execute(searchContext, request.docIds());
if (readerContext.singleSession()) {
freeReaderContext(request.contextId());
}
Expand Down Expand Up @@ -1464,7 +1462,7 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc
* Shortcut ids to load, we load only "from" and up to "size". The phase controller
* handles this as well since the result is always size * shards for Q_T_F
*/
private static void shortcutDocIdsToLoad(SearchContext context) {
private static int[] shortcutDocIdsToLoad(SearchContext context) {
final int[] docIdsToLoad;
int docsOffset = 0;
final Suggest suggest = context.queryResult().suggest();
Expand Down Expand Up @@ -1502,7 +1500,7 @@ private static void shortcutDocIdsToLoad(SearchContext context) {
docIdsToLoad[docsOffset++] = option.getDoc().doc;
}
}
context.docIdsToLoad(docIdsToLoad);
return docIdsToLoad;
}

private static void processScroll(InternalScrollSearchRequest request, SearchContext context) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -191,8 +191,7 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE
for (int i = 0; i < topDocs.scoreDocs.length; i++) {
docIdsToLoad[i] = topDocs.scoreDocs[i].doc;
}
subSearchContext.docIdsToLoad(docIdsToLoad);
subSearchContext.fetchPhase().execute(subSearchContext);
subSearchContext.fetchPhase().execute(subSearchContext, docIdsToLoad);
FetchSearchResult fetchResult = subSearchContext.fetchResult();
if (fetchProfiles != null) {
fetchProfiles.add(fetchResult.profileResult());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ public FetchPhase(List<FetchSubPhase> fetchSubPhases) {
this.fetchSubPhases[fetchSubPhases.size()] = new InnerHitsPhase(this);
}

public void execute(SearchContext context) {
public void execute(SearchContext context, int[] docIdsToLoad) {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("{}", new SearchContextSourcePrinter(context));
}
Expand All @@ -65,7 +65,7 @@ public void execute(SearchContext context) {
throw new TaskCancelledException("cancelled");
}

if (context.docIdsToLoad() == null || context.docIdsToLoad().length == 0) {
if (docIdsToLoad == null || docIdsToLoad.length == 0) {
// no individual hits to process, so we shortcut
SearchHits hits = new SearchHits(new SearchHit[0], context.queryResult().getTotalHits(), context.queryResult().getMaxScore());
context.fetchResult().shardResult(hits, null);
Expand All @@ -75,7 +75,7 @@ public void execute(SearchContext context) {
Profiler profiler = context.getProfilers() == null ? Profiler.NOOP : Profilers.startProfilingFetchPhase();
SearchHits hits = null;
try {
hits = buildSearchHits(context, profiler);
hits = buildSearchHits(context, docIdsToLoad, profiler);
} finally {
// Always finish profiling
ProfileResult profileResult = profiler.finish();
Expand All @@ -96,7 +96,7 @@ public Source getSource(LeafReaderContext ctx, int doc) {
}
}

private SearchHits buildSearchHits(SearchContext context, Profiler profiler) {
private SearchHits buildSearchHits(SearchContext context, int[] docIdsToLoad, Profiler profiler) {

FetchContext fetchContext = new FetchContext(context);
SourceLoader sourceLoader = context.newSourceLoader();
Expand Down Expand Up @@ -166,7 +166,7 @@ protected SearchHit nextDoc(int doc) throws IOException {
}
};

SearchHit[] hits = docsIterator.iterate(context.shardTarget(), context.searcher().getIndexReader(), context.docIdsToLoad());
SearchHit[] hits = docsIterator.iterate(context.shardTarget(), context.searcher().getIndexReader(), docIdsToLoad);

if (context.isCancelled()) {
throw new TaskCancelledException("cancelled");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,11 +89,10 @@ private void hitExecute(Map<String, InnerHitsContext.InnerHitSubContext> innerHi
for (int j = 0; j < topDoc.topDocs.scoreDocs.length; j++) {
docIdsToLoad[j] = topDoc.topDocs.scoreDocs[j].doc;
}
innerHitsContext.docIdsToLoad(docIdsToLoad);
innerHitsContext.setRootId(hit.getId());
innerHitsContext.setRootLookup(rootSource);

fetchPhase.execute(innerHitsContext);
fetchPhase.execute(innerHitsContext, docIdsToLoad);
FetchSearchResult fetchResult = innerHitsContext.fetchResult();
SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits();
for (int j = 0; j < internalHits.length; j++) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -359,16 +359,6 @@ public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) {
in.seqNoAndPrimaryTerm(seqNoAndPrimaryTerm);
}

@Override
public int[] docIdsToLoad() {
return in.docIdsToLoad();
}

@Override
public SearchContext docIdsToLoad(int[] docIdsToLoad) {
return in.docIdsToLoad(docIdsToLoad);
}

@Override
public DfsSearchResult dfsResult() {
return in.dfsResult();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -322,10 +322,6 @@ public Query rewrittenQuery() {
/** controls whether the sequence number and primary term of the last modification to each hit should be returned */
public abstract void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm);

public abstract int[] docIdsToLoad();

public abstract SearchContext docIdsToLoad(int[] docIdsToLoad);

public abstract DfsSearchResult dfsResult();

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,6 @@ public class SubSearchContext extends FilteredSearchContext {
private final FetchSearchResult fetchSearchResult;
private final QuerySearchResult querySearchResult;

private int[] docIdsToLoad;

private StoredFieldsContext storedFields;
private ScriptFieldsContext scriptFields;
private FetchSourceContext fetchSourceContext;
Expand Down Expand Up @@ -276,17 +274,6 @@ public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) {
this.seqNoAndPrimaryTerm = seqNoAndPrimaryTerm;
}

@Override
public int[] docIdsToLoad() {
return docIdsToLoad;
}

@Override
public SearchContext docIdsToLoad(int[] docIdsToLoad) {
this.docIdsToLoad = docIdsToLoad;
return this;
}

@Override
public CollapseContext collapse() {
return null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -482,16 +482,6 @@ public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) {
throw new UnsupportedOperationException();
}

@Override
public int[] docIdsToLoad() {
throw new UnsupportedOperationException();
}

@Override
public SearchContext docIdsToLoad(int[] docIdsToLoad) {
throw new UnsupportedOperationException();
}

@Override
public DfsSearchResult dfsResult() {
throw new UnsupportedOperationException();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -452,16 +452,6 @@ public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) {

}

@Override
public int[] docIdsToLoad() {
return new int[0];
}

@Override
public SearchContext docIdsToLoad(int[] docIdsToLoad) {
return null;
}

@Override
public DfsSearchResult dfsResult() {
return null;
Expand Down

0 comments on commit 94e510e

Please sign in to comment.