Fix NPE in ConcurrentQueryProfile while computing the breakdown map for slices (#10111)

* Fix NPE in ConcurrentQueryProfile while computing the breakdown map for slices.

There can be cases where one or more slices do not have timing-related information for their leaves in the
contexts map. When building the slice-level and query-level breakdown maps, such cases need to be handled by
falling back to default values. This change also updates the min/max/avg sliceNodeTime so that it no longer
includes the time to create the weight or the time slice threads spend waiting; it now reflects the min/max/avg
execution time of each slice, whereas totalNodeTime reflects the total query time.
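
As a rough editor's illustration of the timing semantics described above (made-up nanosecond values, not code from this change):

```java
public class SliceTimingSketch {
    public static void main(String[] args) {
        // Hypothetical timestamps, in nanoseconds.
        long createWeightStart = 0L;   // create_weight runs once, on the caller thread
        long sliceStart = 80L;         // earliest start across this slice's timing types
        long sliceEnd = 200L;          // latest end across this slice's timing types

        // Per-slice node time now excludes create_weight and any wait before the slice thread ran.
        long sliceNodeTime = sliceEnd - sliceStart;        // 120

        // The query-level total still spans from the start of create_weight to the end of the last slice.
        long totalNodeTime = sliceEnd - createWeightStart; // 200

        System.out.println("sliceNodeTime=" + sliceNodeTime + ", totalNodeTime=" + totalNodeTime);
    }
}
```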

Signed-off-by: Sorabh Hamirwasia <sohami.apache@gmail.com>

* Address review comments

Signed-off-by: Sorabh Hamirwasia <sohami.apache@gmail.com>

---------

Signed-off-by: Sorabh Hamirwasia <sohami.apache@gmail.com>
sohami committed Sep 21, 2023
1 parent b944194 commit 9f0e017
Showing 2 changed files with 147 additions and 40 deletions.
@@ -10,6 +10,7 @@

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Collector;
+import org.opensearch.OpenSearchException;
import org.opensearch.search.profile.AbstractProfileBreakdown;
import org.opensearch.search.profile.ContextualProfileBreakdown;

@@ -73,14 +74,14 @@ public Map<String, Long> toBreakdownMap() {
// If there are no leaf contexts, then return the default concurrent query level breakdown, which will include the
// create_weight time/count
queryNodeTime = createWeightTime;
-maxSliceNodeTime = queryNodeTime;
-minSliceNodeTime = queryNodeTime;
-avgSliceNodeTime = queryNodeTime;
+maxSliceNodeTime = 0L;
+minSliceNodeTime = 0L;
+avgSliceNodeTime = 0L;
return buildDefaultQueryBreakdownMap(createWeightTime);
}

// first create the slice level breakdowns
-final Map<Collector, Map<String, Long>> sliceLevelBreakdowns = buildSliceLevelBreakdown(createWeightStartTime);
+final Map<Collector, Map<String, Long>> sliceLevelBreakdowns = buildSliceLevelBreakdown();
return buildQueryBreakdownMap(sliceLevelBreakdowns, createWeightTime, createWeightStartTime);
}

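For the early-return path above, a minimal self-contained sketch of what the default breakdown roughly looks like when no leaf context was ever collected; the key names are illustrative, not the profiler's actual breakdown keys:

```java
import java.util.HashMap;
import java.util.Map;

public class DefaultBreakdownSketch {
    // Breakdown for a query that never touched a leaf (e.g. no slice ever ran),
    // so the only real work was creating the weight.
    static Map<String, Long> buildDefaultBreakdown(long createWeightTime) {
        Map<String, Long> breakdown = new HashMap<>();
        breakdown.put("create_weight", createWeightTime);
        breakdown.put("create_weight_count", 1L);
        // No slice executed, so the slice-level min/max/avg are 0 rather than the create_weight time.
        breakdown.put("min_slice_time", 0L);
        breakdown.put("max_slice_time", 0L);
        breakdown.put("avg_slice_time", 0L);
        return breakdown;
    }

    public static void main(String[] args) {
        System.out.println(buildDefaultBreakdown(42L));
    }
}
```
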
@@ -124,18 +125,19 @@ private Map<String, Long> buildDefaultQueryBreakdownMap(long createWeightTime) {
/**
* Computes the slice level breakdownMap. It uses sliceCollectorsToLeaves to figure out all the leaves or segments part of a slice.
* Then use the breakdown timing stats for each of these leaves to calculate the breakdown stats at slice level.
-* @param createWeightStartTime start time when createWeight is called
+*
* @return map of collector (or slice) to breakdown map
*/
-Map<Collector, Map<String, Long>> buildSliceLevelBreakdown(long createWeightStartTime) {
+Map<Collector, Map<String, Long>> buildSliceLevelBreakdown() {
final Map<Collector, Map<String, Long>> sliceLevelBreakdowns = new HashMap<>();
-long totalSliceNodeTime = 0;
+long totalSliceNodeTime = 0L;
for (Map.Entry<Collector, List<LeafReaderContext>> slice : sliceCollectorsToLeaves.entrySet()) {
final Collector sliceCollector = slice.getKey();
// initialize each slice level breakdown
final Map<String, Long> currentSliceBreakdown = sliceLevelBreakdowns.computeIfAbsent(sliceCollector, k -> new HashMap<>());
// max slice end time across all timing types
long sliceMaxEndTime = Long.MIN_VALUE;
+long sliceMinStartTime = Long.MAX_VALUE;
for (QueryTimingType timingType : QueryTimingType.values()) {
if (timingType.equals(QueryTimingType.CREATE_WEIGHT)) {
// do nothing for create weight as that is query level time and not slice level
@@ -155,6 +157,12 @@ Map<Collector, Map<String, Long>> buildSliceLevelBreakdown(long createWeightStar
// leaf, but the leaf level breakdown will not be created in the contexts map.
// This is because before updating the contexts map, the query hits earlyTerminationException.
// To handle such case, we will ignore the leaf that is not present.
+//
+// Other than early termination, it can also happen in other cases. For example: there is a must boolean query
+// with 2 boolean clauses. While creating scorer for first clause if no docs are found for the field in a leaf
+// context then it will return null scorer. Then for 2nd clause weight as well no scorer will be created for this
+// leaf context (as it is a must query). Due to this it will end up missing the leaf context in the contexts map
+// for second clause weight.
continue;
}
final Map<String, Long> currentSliceLeafBreakdownMap = contexts.get(sliceLeaf).toBreakdownMap();
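
A small sketch of the skip-missing-leaf guard described in the comment above; for simplicity the contexts map here is keyed by a leaf id string, whereas the real map is keyed by LeafReaderContext:

```java
import java.util.List;
import java.util.Map;

public class MissingLeafSketch {
    // Sum one timing type over a slice's leaves, skipping leaves that never recorded stats
    // (for example because the query terminated early or a required clause produced no scorer).
    static long sumTimingOverSlice(List<String> sliceLeaves, Map<String, Map<String, Long>> contexts, String timingType) {
        long total = 0L;
        for (String leaf : sliceLeaves) {
            Map<String, Long> leafBreakdown = contexts.get(leaf);
            if (leafBreakdown == null) {
                continue; // this leaf has no profiled stats; ignore it instead of dereferencing null
            }
            total += leafBreakdown.getOrDefault(timingType, 0L);
        }
        return total;
    }

    public static void main(String[] args) {
        Map<String, Map<String, Long>> contexts = Map.of("leaf-0", Map.of("next_doc", 30L));
        System.out.println(sumTimingOverSlice(List.of("leaf-0", "leaf-1"), contexts, "next_doc")); // prints 30
    }
}
```
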
@@ -182,15 +190,36 @@ Map<Collector, Map<String, Long>> buildSliceLevelBreakdown(long createWeightStar
);
}
// compute sliceMaxEndTime as max of sliceEndTime across all timing types
-sliceMaxEndTime = Math.max(sliceMaxEndTime, currentSliceBreakdown.get(timingTypeSliceEndTimeKey));
+sliceMaxEndTime = Math.max(sliceMaxEndTime, currentSliceBreakdown.getOrDefault(timingTypeSliceEndTimeKey, Long.MIN_VALUE));
+sliceMinStartTime = Math.min(
+sliceMinStartTime,
+currentSliceBreakdown.getOrDefault(timingTypeSliceStartTimeKey, Long.MAX_VALUE)
+);
// compute total time for each timing type at slice level using sliceEndTime and sliceStartTime
currentSliceBreakdown.put(
timingType.toString(),
-currentSliceBreakdown.get(timingTypeSliceEndTimeKey) - currentSliceBreakdown.get(timingTypeSliceStartTimeKey)
+currentSliceBreakdown.getOrDefault(timingTypeSliceEndTimeKey, 0L) - currentSliceBreakdown.getOrDefault(
+timingTypeSliceStartTimeKey,
+0L
+)
);
}
-// currentSliceNodeTime includes the create weight time as well which will be same for all the slices
-long currentSliceNodeTime = sliceMaxEndTime - createWeightStartTime;
+// currentSliceNodeTime does not include the create weight time, as that is computed in non-concurrent part
+long currentSliceNodeTime;
+if (sliceMinStartTime == Long.MAX_VALUE && sliceMaxEndTime == Long.MIN_VALUE) {
+currentSliceNodeTime = 0L;
+} else if (sliceMinStartTime == Long.MAX_VALUE || sliceMaxEndTime == Long.MIN_VALUE) {
+throw new OpenSearchException(
+"Unexpected value of sliceMinStartTime ["
++ sliceMinStartTime
++ "] or sliceMaxEndTime ["
++ sliceMaxEndTime
++ "] while computing the slice level timing profile breakdowns"
+);
+} else {
+currentSliceNodeTime = sliceMaxEndTime - sliceMinStartTime;
+}

// compute max/min slice times
maxSliceNodeTime = Math.max(maxSliceNodeTime, currentSliceNodeTime);
minSliceNodeTime = Math.min(minSliceNodeTime, currentSliceNodeTime);
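
A compact sketch of how a slice's node time falls out of the min-start/max-end accumulation above, simplified to a single breakdown map per slice with illustrative key names:

```java
import java.util.Map;

public class SliceNodeTimeSketch {
    // Compute a slice's wall-clock span from per-timing-type start/end keys, using sentinel values
    // so that a slice with no recorded timings yields 0 instead of a nonsense difference.
    static long sliceNodeTime(Map<String, Long> sliceBreakdown, String[] timingTypes) {
        long minStart = Long.MAX_VALUE;
        long maxEnd = Long.MIN_VALUE;
        for (String type : timingTypes) {
            minStart = Math.min(minStart, sliceBreakdown.getOrDefault(type + "_start_time", Long.MAX_VALUE));
            maxEnd = Math.max(maxEnd, sliceBreakdown.getOrDefault(type + "_end_time", Long.MIN_VALUE));
        }
        if (minStart == Long.MAX_VALUE && maxEnd == Long.MIN_VALUE) {
            return 0L; // nothing was recorded for this slice
        }
        if (minStart == Long.MAX_VALUE || maxEnd == Long.MIN_VALUE) {
            throw new IllegalStateException("inconsistent start/end times for this slice");
        }
        return maxEnd - minStart;
    }

    public static void main(String[] args) {
        Map<String, Long> slice = Map.of("score_start_time", 100L, "score_end_time", 160L);
        System.out.println(sliceNodeTime(slice, new String[] { "score", "next_doc" })); // prints 60
    }
}
```
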
@@ -245,8 +274,8 @@ public Map<String, Long> buildQueryBreakdownMap(

// for all other timing types, we will compute min/max/avg/total across slices
for (Map.Entry<Collector, Map<String, Long>> sliceBreakdown : sliceLevelBreakdowns.entrySet()) {
-Long sliceBreakdownTypeTime = sliceBreakdown.getValue().get(timingTypeKey);
-Long sliceBreakdownTypeCount = sliceBreakdown.getValue().get(timingTypeCountKey);
+long sliceBreakdownTypeTime = sliceBreakdown.getValue().getOrDefault(timingTypeKey, 0L);
+long sliceBreakdownTypeCount = sliceBreakdown.getValue().getOrDefault(timingTypeCountKey, 0L);
// compute max/min/avg TimingType time across slices
queryBreakdownMap.compute(
maxBreakdownTypeTime,
@@ -276,17 +305,38 @@ public Map<String, Long> buildQueryBreakdownMap(
);

// query start/end time for a TimingType is min/max of start/end time across slices for that TimingType
-queryTimingTypeEndTime = Math.max(queryTimingTypeEndTime, sliceBreakdown.getValue().get(sliceEndTimeForTimingType));
-queryTimingTypeStartTime = Math.min(queryTimingTypeStartTime, sliceBreakdown.getValue().get(sliceStartTimeForTimingType));
+queryTimingTypeEndTime = Math.max(
+queryTimingTypeEndTime,
+sliceBreakdown.getValue().getOrDefault(sliceEndTimeForTimingType, Long.MIN_VALUE)
+);
+queryTimingTypeStartTime = Math.min(
+queryTimingTypeStartTime,
+sliceBreakdown.getValue().getOrDefault(sliceStartTimeForTimingType, Long.MAX_VALUE)
+);
queryTimingTypeCount += sliceBreakdownTypeCount;
}

+if (queryTimingTypeStartTime == Long.MAX_VALUE || queryTimingTypeEndTime == Long.MIN_VALUE) {
+throw new OpenSearchException(
+"Unexpected timing type ["
++ timingTypeKey
++ "] start ["
++ queryTimingTypeStartTime
++ "] or end time ["
++ queryTimingTypeEndTime
++ "] computed across slices for profile results"
+);
+}
queryBreakdownMap.put(timingTypeKey, queryTimingTypeEndTime - queryTimingTypeStartTime);
queryBreakdownMap.put(timingTypeCountKey, queryTimingTypeCount);
-queryBreakdownMap.compute(avgBreakdownTypeTime, (key, value) -> (value == null) ? 0 : value / sliceLevelBreakdowns.size());
-queryBreakdownMap.compute(avgBreakdownTypeCount, (key, value) -> (value == null) ? 0 : value / sliceLevelBreakdowns.size());
+queryBreakdownMap.compute(avgBreakdownTypeTime, (key, value) -> (value == null) ? 0L : value / sliceLevelBreakdowns.size());
+queryBreakdownMap.compute(avgBreakdownTypeCount, (key, value) -> (value == null) ? 0L : value / sliceLevelBreakdowns.size());
// compute query end time using max of query end time across all timing types
queryEndTime = Math.max(queryEndTime, queryTimingTypeEndTime);
}
+if (queryEndTime == Long.MIN_VALUE) {
+throw new OpenSearchException("Unexpected error while computing the query end time across slices in profile result");
+}
queryNodeTime = queryEndTime - createWeightStartTime;
return queryBreakdownMap;
}
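
Finally, a sketch of the query-level roll-up for a single timing type, showing the max/min/avg aggregation across slices with 0L defaults for slices that never recorded that type (names and values are illustrative):

```java
import java.util.List;
import java.util.Map;

public class QueryRollupSketch {
    public static void main(String[] args) {
        // Hypothetical per-slice totals for one timing type, e.g. "score".
        List<Map<String, Long>> slices = List.of(
            Map.of("score", 120L),
            Map.of("score", 80L),
            Map.of() // this slice never scored anything; default to 0L rather than unboxing a null Long
        );

        long max = Long.MIN_VALUE;
        long min = Long.MAX_VALUE;
        long sum = 0L;
        for (Map<String, Long> slice : slices) {
            long t = slice.getOrDefault("score", 0L);
            max = Math.max(max, t);
            min = Math.min(min, t);
            sum += t;
        }
        long avg = sum / slices.size();
        System.out.println("max=" + max + ", min=" + min + ", avg=" + avg); // max=120, min=0, avg=66
    }
}
```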