Merge branch 'main' into clean-ali-inference-code
jonathan-buttner authored Jan 13, 2025
2 parents 3b7439b + 53773cb commit 182e96e
Showing 202 changed files with 2,701 additions and 1,119 deletions.
@@ -27,6 +27,7 @@
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
+import org.elasticsearch.xpack.esql.core.expression.FoldContext;
import org.elasticsearch.xpack.esql.core.expression.Literal;
import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern;
import org.elasticsearch.xpack.esql.core.tree.Source;
@@ -71,12 +72,11 @@ public class EvalBenchmark {
BigArrays.NON_RECYCLING_INSTANCE
);

+private static final FoldContext FOLD_CONTEXT = FoldContext.small();
+
private static final int BLOCK_LENGTH = 8 * 1024;

-static final DriverContext driverContext = new DriverContext(
-BigArrays.NON_RECYCLING_INSTANCE,
-BlockFactory.getInstance(new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE)
-);
+static final DriverContext driverContext = new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, blockFactory);

static {
// Smoke test all the expected values and force loading subclasses more like prod
@@ -114,18 +114,20 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) {
return switch (operation) {
case "abs" -> {
FieldAttribute longField = longField();
-yield EvalMapper.toEvaluator(new Abs(Source.EMPTY, longField), layout(longField)).get(driverContext);
+yield EvalMapper.toEvaluator(FOLD_CONTEXT, new Abs(Source.EMPTY, longField), layout(longField)).get(driverContext);
}
case "add" -> {
FieldAttribute longField = longField();
yield EvalMapper.toEvaluator(
+FOLD_CONTEXT,
new Add(Source.EMPTY, longField, new Literal(Source.EMPTY, 1L, DataType.LONG)),
layout(longField)
).get(driverContext);
}
case "add_double" -> {
FieldAttribute doubleField = doubleField();
yield EvalMapper.toEvaluator(
+FOLD_CONTEXT,
new Add(Source.EMPTY, doubleField, new Literal(Source.EMPTY, 1D, DataType.DOUBLE)),
layout(doubleField)
).get(driverContext);
@@ -140,7 +142,8 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) {
lhs = new Add(Source.EMPTY, lhs, new Literal(Source.EMPTY, 1L, DataType.LONG));
rhs = new Add(Source.EMPTY, rhs, new Literal(Source.EMPTY, 1L, DataType.LONG));
}
-yield EvalMapper.toEvaluator(new Case(Source.EMPTY, condition, List.of(lhs, rhs)), layout(f1, f2)).get(driverContext);
+yield EvalMapper.toEvaluator(FOLD_CONTEXT, new Case(Source.EMPTY, condition, List.of(lhs, rhs)), layout(f1, f2))
+.get(driverContext);
}
case "date_trunc" -> {
FieldAttribute timestamp = new FieldAttribute(
@@ -149,35 +152,37 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) {
new EsField("timestamp", DataType.DATETIME, Map.of(), true)
);
yield EvalMapper.toEvaluator(
+FOLD_CONTEXT,
new DateTrunc(Source.EMPTY, new Literal(Source.EMPTY, Duration.ofHours(24), DataType.TIME_DURATION), timestamp),
layout(timestamp)
).get(driverContext);
}
case "equal_to_const" -> {
FieldAttribute longField = longField();
yield EvalMapper.toEvaluator(
+FOLD_CONTEXT,
new Equals(Source.EMPTY, longField, new Literal(Source.EMPTY, 100_000L, DataType.LONG)),
layout(longField)
).get(driverContext);
}
case "long_equal_to_long" -> {
FieldAttribute lhs = longField();
FieldAttribute rhs = longField();
-yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(driverContext);
+yield EvalMapper.toEvaluator(FOLD_CONTEXT, new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(driverContext);
}
case "long_equal_to_int" -> {
FieldAttribute lhs = longField();
FieldAttribute rhs = intField();
-yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(driverContext);
+yield EvalMapper.toEvaluator(FOLD_CONTEXT, new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(driverContext);
}
case "mv_min", "mv_min_ascending" -> {
FieldAttribute longField = longField();
-yield EvalMapper.toEvaluator(new MvMin(Source.EMPTY, longField), layout(longField)).get(driverContext);
+yield EvalMapper.toEvaluator(FOLD_CONTEXT, new MvMin(Source.EMPTY, longField), layout(longField)).get(driverContext);
}
case "rlike" -> {
FieldAttribute keywordField = keywordField();
RLike rlike = new RLike(Source.EMPTY, keywordField, new RLikePattern(".ar"));
-yield EvalMapper.toEvaluator(rlike, layout(keywordField)).get(driverContext);
+yield EvalMapper.toEvaluator(FOLD_CONTEXT, rlike, layout(keywordField)).get(driverContext);
}
default -> throw new UnsupportedOperationException();
};
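
The change in this hunk is mechanical: every EvalMapper.toEvaluator call gains a FoldContext as its first argument, supplied from the shared FOLD_CONTEXT built with FoldContext.small() above. A minimal sketch of the new call shape, assuming it sits inside EvalBenchmark so the diff's helpers (longField(), layout(), driverContext) are in scope; the absEvaluator name is hypothetical and the snippet is illustrative rather than the exact benchmark code:

// Old: EvalMapper.toEvaluator(expression, layout).get(driverContext)
// New: a size-limited FoldContext is threaded through as the first argument.
private static EvalOperator.ExpressionEvaluator absEvaluator() {
    FieldAttribute longField = longField();
    return EvalMapper.toEvaluator(FOLD_CONTEXT, new Abs(Source.EMPTY, longField), layout(longField))
        .get(driverContext);
}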
5 changes: 5 additions & 0 deletions docs/changelog/118602.yaml
@@ -0,0 +1,5 @@
pr: 118602
summary: Limit memory usage of `fold`
area: ES|QL
type: bug
issues: []


12 changes: 12 additions & 0 deletions docs/reference/esql/functions/kibana/definition/date_format.json


9 changes: 8 additions & 1 deletion docs/reference/esql/functions/kibana/docs/match_operator.md


1 change: 1 addition & 0 deletions docs/reference/esql/functions/types/date_format.asciidoc
