From ce13bd16dead1c53682aff8e0e3c18cfd5605f7c Mon Sep 17 00:00:00 2001
From: lifengchao
Date: Mon, 25 Sep 2023 11:05:24 +0800
Subject: [PATCH] druid-hdrhistogram: upgrade Druid version to 26.0.0
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 druid-hdrhistogram/pom.xml                    |  21 +-
 .../HdrHistogramAggregatorFactory.java        |  24 +-
 .../HdrHistogramMergeAggregatorFactory.java   |   7 +-
 .../HdrHistogram/HdrHistogramModule.java      |   3 +-
 ...rHistogramToPercentilesPostAggregator.java |   7 +
 .../HdrHistogramToQuantilePostAggregator.java |   7 +
 ...HdrHistogramToQuantilesPostAggregator.java |   7 +
 .../sql/HdrHistogramObjectSqlAggregator.java  |  13 +-
 ...istogramPercentilesOperatorConversion.java |  11 +-
 .../HdrHistogramQuantileSqlAggregator.java    |  33 +-
 ...rHistogramQuantilesOperatorConversion.java |  54 +--
 .../HdrHistogramBufferAggregatorTest.java     |  10 +-
 ...HdrHistogramQuantileSqlAggregatorTest.java | 405 +++++++++---------
 .../sketch/hlld/HllAggregatorFactory.java     |   3 +-
 14 files changed, 314 insertions(+), 291 deletions(-)

diff --git a/druid-hdrhistogram/pom.xml b/druid-hdrhistogram/pom.xml
index adc85a2..892bc1a 100644
--- a/druid-hdrhistogram/pom.xml
+++ b/druid-hdrhistogram/pom.xml
@@ -5,7 +5,7 @@
   4.0.0
   org.apache.druid.extensions
-  druid-hdrhistogram_0.18.1
+  druid-hdrhistogram_26.0.0
   druid-hdrhistogram
   1.0-SNAPSHOT
@@ -14,7 +14,7 @@
   UTF-8
   1.8
   1.8
-  0.18.1
+  26.0.0
@@ -45,6 +45,13 @@
+
+    org.easymock
+    easymock
+    4.3
+    test
+
+
   org.apache.druid
   druid-processing
   ${druid.version}
   test
@@ -54,9 +61,17 @@
   org.apache.druid
-  druid-benchmarks
+  druid-server
   ${druid.version}
   test
+  test-jar
+
+
+  org.apache.druid
+  druid-sql
+  ${druid.version}
+  test-jar
+  test

   junit
diff --git a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramAggregatorFactory.java b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramAggregatorFactory.java
index 8596fc3..679125f 100644
--- a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramAggregatorFactory.java
+++ b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramAggregatorFactory.java
@@ -1,8 +1,6 @@
 package org.apache.druid.query.aggregation.sketch.HdrHistogram;

 import com.fasterxml.jackson.annotation.JsonProperty;
-import org.HdrHistogram.DirectHistogram;
-import org.HdrHistogram.Histogram;
 import org.HdrHistogram.HistogramSketch;
 import org.HdrHistogram.HistogramUnion;
 import org.apache.druid.java.util.common.IAE;
@@ -10,6 +8,7 @@ import org.apache.druid.query.aggregation.*;
 import org.apache.druid.query.cache.CacheKeyBuilder;
 import org.apache.druid.segment.ColumnSelectorFactory;
 import org.apache.druid.segment.ColumnValueSelector;
+import org.apache.druid.segment.column.ColumnType;

 import javax.annotation.Nullable;
 import java.util.Collections;
@@ -208,15 +207,27 @@ public class HdrHistogramAggregatorFactory extends AggregatorFactory {
     );
   }

+  @Override
+  public AggregatorFactory withName(String newName) {
+    return new HdrHistogramAggregatorFactory(newName, fieldName, lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits, autoResize);
+  }
+
   @Override
   public Object deserialize(Object object) {
     return HistogramUtils.deserializeHistogram(object);
   }

+  @Override
+  public ColumnType getResultType() {
+    //return ColumnType.LONG;
+    return getIntermediateType();
+  }
+
   @Nullable
   @Override
   public Object finalizeComputation(@Nullable Object object) {
-    return object == null ? null : ((HistogramSketch) object).getTotalCount();
+    //return object == null ? null : ((HistogramSketch) object).getTotalCount();
+    return object;
   }

   @Override
@@ -250,9 +261,16 @@ public class HdrHistogramAggregatorFactory extends AggregatorFactory {
     return autoResize;
   }

+  /*
+  This method no longer exists; the new version must implement getIntermediateType instead.
   @Override
   public String getTypeName() {
     return HdrHistogramModule.HDRHISTOGRAM_TYPE_NAME;
+  }*/
+
+  @Override
+  public ColumnType getIntermediateType() {
+    return HdrHistogramModule.TYPE;
   }

   @Override
diff --git a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramMergeAggregatorFactory.java b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramMergeAggregatorFactory.java
index 2198f06..85dae33 100644
--- a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramMergeAggregatorFactory.java
+++ b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramMergeAggregatorFactory.java
@@ -1,9 +1,9 @@
 package org.apache.druid.query.aggregation.sketch.HdrHistogram;

 import com.fasterxml.jackson.annotation.JsonProperty;
-import org.HdrHistogram.Histogram;
 import org.HdrHistogram.HistogramSketch;
 import org.apache.druid.query.aggregation.Aggregator;
+import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.BufferAggregator;
 import org.apache.druid.query.cache.CacheKeyBuilder;
 import org.apache.druid.segment.ColumnSelectorFactory;
@@ -48,6 +48,11 @@ public class HdrHistogramMergeAggregatorFactory extends HdrHistogramAggregatorFa
     );
   }

+  @Override
+  public AggregatorFactory withName(String newName) {
+    return new HdrHistogramMergeAggregatorFactory(newName, fieldName, lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits, autoResize);
+  }
+
   @Override
   public byte[] getCacheKey() {
     return new CacheKeyBuilder(HdrHistogramModule.CACHE_TYPE_ID_OFFSET).appendByte(HdrHistogramModule.QUANTILES_HDRHISTOGRAM_MERGE_CACHE_TYPE_ID)
diff --git a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramModule.java b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramModule.java
index 117feda..5041965 100644
--- a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramModule.java
+++ b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramModule.java
@@ -7,13 +7,13 @@ import com.fasterxml.jackson.databind.jsontype.NamedType;
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.inject.Binder;
-import org.HdrHistogram.Histogram;
 import org.HdrHistogram.HistogramSketch;
 import org.apache.druid.initialization.DruidModule;
 import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.HdrHistogramObjectSqlAggregator;
 import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.HdrHistogramPercentilesOperatorConversion;
 import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.HdrHistogramQuantileSqlAggregator;
 import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.HdrHistogramQuantilesOperatorConversion;
+import org.apache.druid.segment.column.ColumnType;
 import org.apache.druid.segment.serde.ComplexMetrics;
import org.apache.druid.sql.guice.SqlBindings; @@ -29,6 +29,7 @@ public class HdrHistogramModule implements DruidModule { public static final byte QUANTILES_HDRHISTOGRAM_TO_PERCENTILES_CACHE_TYPE_ID = 0x05; public static final String HDRHISTOGRAM_TYPE_NAME = "HdrHistogramSketch"; + public static final ColumnType TYPE = ColumnType.ofComplex(HDRHISTOGRAM_TYPE_NAME); public static final ObjectMapper objectMapper = new ObjectMapper(); diff --git a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramToPercentilesPostAggregator.java b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramToPercentilesPostAggregator.java index 96ba73a..94c6def 100644 --- a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramToPercentilesPostAggregator.java +++ b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramToPercentilesPostAggregator.java @@ -9,6 +9,8 @@ import org.apache.druid.java.util.common.IAE; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.PostAggregator; import org.apache.druid.query.cache.CacheKeyBuilder; +import org.apache.druid.segment.ColumnInspector; +import org.apache.druid.segment.column.ColumnType; import javax.annotation.Nullable; import java.util.*; @@ -29,6 +31,11 @@ public class HdrHistogramToPercentilesPostAggregator implements PostAggregator { this.percentileTicksPerHalfDistance = percentileTicksPerHalfDistance; } + @Override + public ColumnType getType(ColumnInspector signature){ + return ColumnType.STRING; + } + @Override @JsonProperty public String getName() { diff --git a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramToQuantilePostAggregator.java b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramToQuantilePostAggregator.java index e7f37c9..e106fbe 100644 --- a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramToQuantilePostAggregator.java +++ b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramToQuantilePostAggregator.java @@ -9,6 +9,8 @@ import org.apache.druid.java.util.common.IAE; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.PostAggregator; import org.apache.druid.query.cache.CacheKeyBuilder; +import org.apache.druid.segment.ColumnInspector; +import org.apache.druid.segment.column.ColumnType; import javax.annotation.Nullable; import java.util.Comparator; @@ -36,6 +38,11 @@ public class HdrHistogramToQuantilePostAggregator implements PostAggregator { } } + @Override + public ColumnType getType(ColumnInspector signature){ + return ColumnType.LONG; + } + @Override public Set getDependentFields() { return Sets.newHashSet(fieldName); diff --git a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramToQuantilesPostAggregator.java b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramToQuantilesPostAggregator.java index 216947f..c7bf73d 100644 --- a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramToQuantilesPostAggregator.java +++ 
b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramToQuantilesPostAggregator.java @@ -9,6 +9,8 @@ import org.apache.druid.java.util.common.IAE; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.PostAggregator; import org.apache.druid.query.cache.CacheKeyBuilder; +import org.apache.druid.segment.ColumnInspector; +import org.apache.druid.segment.column.ColumnType; import javax.annotation.Nullable; import java.util.*; @@ -29,6 +31,11 @@ public class HdrHistogramToQuantilesPostAggregator implements PostAggregator { this.probabilitys = probabilitys; } + @Override + public ColumnType getType(ColumnInspector signature){ + return ColumnType.LONG_ARRAY; + } + @Override @JsonProperty public String getName() { diff --git a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramObjectSqlAggregator.java b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramObjectSqlAggregator.java index 6a47da7..5d522b6 100644 --- a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramObjectSqlAggregator.java +++ b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramObjectSqlAggregator.java @@ -18,6 +18,7 @@ import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramAggregatorFactory; import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramMergeAggregatorFactory; import org.apache.druid.segment.VirtualColumn; +import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; import org.apache.druid.segment.column.ValueType; import org.apache.druid.sql.calcite.aggregation.Aggregation; @@ -118,11 +119,11 @@ public class HdrHistogramObjectSqlAggregator implements SqlAggregator { } // No existing match found. Create a new one. 
-    final List<VirtualColumn> virtualColumns = new ArrayList<>();
+    // Removed in the new version: final List<VirtualColumn> virtualColumns = new ArrayList<>();

     if (input.isDirectColumnAccess()) {
       // the argument is a Histogram object
-      if (rowSignature.getColumnType(input.getDirectColumn()).orElse(null) == ValueType.COMPLEX) {
+      if (rowSignature.getColumnType(input.getDirectColumn()).map(type -> type.is(ValueType.COMPLEX)).orElse(false)) {
         aggregatorFactory = new HdrHistogramMergeAggregatorFactory(
             histogramName,
             input.getDirectColumn(),
@@ -142,12 +143,11 @@
         );
       }
     } else {
-      final VirtualColumn virtualColumn =
-          virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, input, SqlTypeName.BIGINT);
-      virtualColumns.add(virtualColumn);
+      final String virtualColumnName =
+          virtualColumnRegistry.getOrCreateVirtualColumnForExpression(input, ColumnType.LONG);
       aggregatorFactory = new HdrHistogramAggregatorFactory(
           histogramName,
-          virtualColumn.getOutputName(),
+          virtualColumnName,
           lowestDiscernibleValue,
           highestTrackableValue,
           numberOfSignificantValueDigits,
@@ -156,7 +156,6 @@
     }

     return Aggregation.create(
-        virtualColumns,
         ImmutableList.of(aggregatorFactory),
         null
     );
diff --git a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramPercentilesOperatorConversion.java b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramPercentilesOperatorConversion.java
index 710fd69..d683f0e 100644
--- a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramPercentilesOperatorConversion.java
+++ b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramPercentilesOperatorConversion.java
@@ -14,16 +14,16 @@ import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramToPerc
 import org.apache.druid.query.aggregation.PostAggregator;
 import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
 import org.apache.druid.segment.column.RowSignature;
-import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
 import org.apache.druid.sql.calcite.expression.DruidExpression;
 import org.apache.druid.sql.calcite.expression.OperatorConversions;
 import org.apache.druid.sql.calcite.expression.PostAggregatorVisitor;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
 import org.apache.druid.sql.calcite.planner.PlannerContext;

 import javax.annotation.Nullable;
 import java.util.List;

-public class HdrHistogramPercentilesOperatorConversion extends DirectOperatorConversion {
+public class HdrHistogramPercentilesOperatorConversion implements SqlOperatorConversion {
   private static final String FUNCTION_NAME = "HDR_GET_PERCENTILES";
   private static final SqlFunction SQL_FUNCTION = OperatorConversions
       .operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
@@ -32,10 +32,6 @@ public class HdrHistogramPercentilesOperatorCon
       .returnTypeInference(ReturnTypes.explicit(SqlTypeName.VARCHAR))
       .build();

-  public HdrHistogramPercentilesOperatorConversion() {
-    super(SQL_FUNCTION, FUNCTION_NAME);
-  }
-
   @Override
   public SqlOperator calciteOperator()
   {
@@ -66,7 +62,8 @@ public class HdrHistogramPercentilesOperatorCon
         plannerContext,
         rowSignature,
         operands.get(0),
-        postAggregatorVisitor
+        postAggregatorVisitor,
+        true
     );

     if (postAgg == null)
{ diff --git a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramQuantileSqlAggregator.java b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramQuantileSqlAggregator.java index b23489d..b14c1aa 100644 --- a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramQuantileSqlAggregator.java +++ b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramQuantileSqlAggregator.java @@ -16,6 +16,7 @@ import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramAggreg import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramMergeAggregatorFactory; import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramToQuantilePostAggregator; import org.apache.druid.segment.VirtualColumn; +import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; import org.apache.druid.segment.column.ValueType; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; @@ -141,22 +142,16 @@ public class HdrHistogramQuantileSqlAggregator implements SqlAggregator { // Check input for equivalence. final boolean inputMatches; - final VirtualColumn virtualInput = existing.getVirtualColumns() - .stream() - .filter( - virtualColumn -> - virtualColumn.getOutputName() - .equals(theFactory.getFieldName()) - ) - .findFirst() - .orElse(null); + final DruidExpression virtualInput = + virtualColumnRegistry.findVirtualColumnExpressions(theFactory.requiredFields()) + .stream() + .findFirst() + .orElse(null); if (virtualInput == null) { - inputMatches = input.isDirectColumnAccess() - && input.getDirectColumn().equals(theFactory.getFieldName()); + inputMatches = input.isDirectColumnAccess() && input.getDirectColumn().equals(theFactory.getFieldName()); } else { - inputMatches = ((ExpressionVirtualColumn) virtualInput).getExpression() - .equals(input.getExpression()); + inputMatches = virtualInput.equals(input); } final boolean matches = inputMatches @@ -177,11 +172,11 @@ public class HdrHistogramQuantileSqlAggregator implements SqlAggregator { } // No existing match found. Create a new one. 
-    final List<VirtualColumn> virtualColumns = new ArrayList<>();
+    //final List<VirtualColumn> virtualColumns = new ArrayList<>();

     if (input.isDirectColumnAccess()) {
       // the argument is a Histogram object
-      if (rowSignature.getColumnType(input.getDirectColumn()).orElse(null) == ValueType.COMPLEX) {
+      if (rowSignature.getColumnType(input.getDirectColumn()).map(type -> type.is(ValueType.COMPLEX)).orElse(false)) {
        aggregatorFactory = new HdrHistogramMergeAggregatorFactory(
            histogramName,
            input.getDirectColumn(),
@@ -201,12 +196,11 @@ public class HdrHistogramQuantileSqlAggregator implements SqlAggregator {
        );
      }
    } else {
-      final VirtualColumn virtualColumn =
-          virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, input, SqlTypeName.BIGINT);
-      virtualColumns.add(virtualColumn);
+      final String virtualColumnName =
+          virtualColumnRegistry.getOrCreateVirtualColumnForExpression(input, ColumnType.LONG);
       aggregatorFactory = new HdrHistogramAggregatorFactory(
           histogramName,
-          virtualColumn.getOutputName(),
+          virtualColumnName,
          lowestDiscernibleValue,
          highestTrackableValue,
          numberOfSignificantValueDigits,
@@ -234,7 +228,6 @@ public class HdrHistogramQuantileSqlAggregator implements SqlAggregator {
    }

    return Aggregation.create(
-        virtualColumns,
        ImmutableList.of(aggregatorFactory),
        new HdrHistogramToQuantilePostAggregator(name, histogramName, probability)
    );
diff --git a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramQuantilesOperatorConversion.java b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramQuantilesOperatorConversion.java
index ce75587..a14a15e 100644
--- a/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramQuantilesOperatorConversion.java
+++ b/druid-hdrhistogram/src/main/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramQuantilesOperatorConversion.java
@@ -62,50 +62,30 @@ public class HdrHistogramQuantilesOperatorConve
  {
    final List<RexNode> operands = ((RexCall) rexNode).getOperands();
    final float[] args = new float[operands.size() - 1];
-    PostAggregator postAgg = null;
-    int operandCounter = 0;
-    for (RexNode operand : operands) {
-      final PostAggregator convertedPostAgg = OperatorConversions.toPostAggregator(
-          plannerContext,
-          rowSignature,
-          operand,
-          postAggregatorVisitor
-      );
-      if (convertedPostAgg == null) {
-        if (operandCounter > 0) {
-          try {
-            if (!operand.isA(SqlKind.LITERAL)) {
-              return null;
-            }
-            float arg = ((Number) RexLiteral.value(operand)).floatValue();
-            args[operandCounter - 1] = arg;
-          }
-          catch (ClassCastException cce) {
-            return null;
-          }
-        } else {
-          return null;
-        }
-      } else {
-        if (operandCounter == 0) {
-          postAgg = convertedPostAgg;
-        } else {
-          if (!operand.isA(SqlKind.LITERAL)) {
-            return null;
-          }
-        }
-      }
-      operandCounter++;
+    // In the new version the input sketch is taken directly from the first operand
+    final PostAggregator inputSketchPostAgg = OperatorConversions.toPostAggregator(
+        plannerContext,
+        rowSignature,
+        operands.get(0),
+        postAggregatorVisitor,
+        true
+    );
+
+    if (inputSketchPostAgg == null) {
+      return null;
    }

-    if (postAgg == null) {
-      return null;
+    // parse the remaining quantile arguments directly
+    for (int i = 1; i < operands.size(); i++) {
+      RexNode operand = operands.get(i);
+      float arg = ((Number) RexLiteral.value(operand)).floatValue();
+      args[i - 1] = arg;
    }

    return new HdrHistogramToQuantilesPostAggregator(
        postAggregatorVisitor.getOutputNamePrefix() + postAggregatorVisitor.getAndIncrementCounter(),
-        ((FieldAccessPostAggregator)postAgg).getFieldName(),
+
((FieldAccessPostAggregator)inputSketchPostAgg).getFieldName(), args ); } diff --git a/druid-hdrhistogram/src/test/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramBufferAggregatorTest.java b/druid-hdrhistogram/src/test/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramBufferAggregatorTest.java index de409c8..8c3e0b2 100644 --- a/druid-hdrhistogram/src/test/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramBufferAggregatorTest.java +++ b/druid-hdrhistogram/src/test/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/HdrHistogramBufferAggregatorTest.java @@ -2,17 +2,13 @@ package org.apache.druid.query.aggregation.sketch.HdrHistogram; import com.google.common.collect.ImmutableMap; import org.HdrHistogram.*; -import org.apache.datasketches.theta.Sketches; -import org.apache.datasketches.theta.UpdateSketch; import org.apache.druid.data.input.MapBasedRow; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.BufferAggregator; import org.apache.druid.query.aggregation.TestLongColumnSelector; import org.apache.druid.query.aggregation.TestObjectColumnSelector; -import org.apache.druid.query.aggregation.datasketches.theta.SketchHolder; -import org.apache.druid.query.aggregation.datasketches.theta.SketchMergeAggregatorFactory; import org.apache.druid.query.groupby.epinephelinae.GrouperTestUtil; -import org.apache.druid.query.groupby.epinephelinae.TestColumnSelectorFactory; +import org.apache.druid.query.groupby.epinephelinae.GroupByTestColumnSelectorFactory; import org.apache.druid.segment.ColumnSelectorFactory; import org.junit.Assert; import org.junit.Test; @@ -230,7 +226,7 @@ public class HdrHistogramBufferAggregatorTest { @Test public void testMergeAggregatorRelocate() { - final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory(); + final GroupByTestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory(); HistogramSketch histogram = new HistogramSketch(3); for (int i = 0; i < 100000; i++) { histogram.recordValue(i); @@ -252,7 +248,7 @@ public class HdrHistogramBufferAggregatorTest { @Test public void testAggregatorRelocate() { - final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory(); + final GroupByTestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory(); HistogramSketch histogram = new HistogramSketch(3); for (int i = 0; i < 100000; i++) { histogram.recordValue(i); diff --git a/druid-hdrhistogram/src/test/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramQuantileSqlAggregatorTest.java b/druid-hdrhistogram/src/test/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramQuantileSqlAggregatorTest.java index 054dc05..69533fd 100644 --- a/druid-hdrhistogram/src/test/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramQuantileSqlAggregatorTest.java +++ b/druid-hdrhistogram/src/test/java/org/apache/druid/query/aggregation/sketch/HdrHistogram/sql/HdrHistogramQuantileSqlAggregatorTest.java @@ -1,12 +1,15 @@ package org.apache.druid.query.aggregation.sketch.HdrHistogram.sql; +import com.alibaba.fastjson2.JSON; import com.fasterxml.jackson.databind.Module; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; +import 
com.google.inject.Injector; import org.apache.calcite.schema.SchemaPlus; import org.apache.druid.data.input.InputRow; +import org.apache.druid.guice.DruidInjectorBuilder; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.java.util.common.io.Closer; import org.apache.druid.query.Druids; @@ -27,66 +30,49 @@ import org.apache.druid.query.spec.MultipleIntervalSegmentSpec; import org.apache.druid.segment.IndexBuilder; import org.apache.druid.segment.QueryableIndex; import org.apache.druid.segment.TestHelper; +import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.ValueType; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.security.AuthTestUtils; import org.apache.druid.server.security.AuthenticationResult; -import org.apache.druid.sql.SqlLifecycle; -import org.apache.druid.sql.SqlLifecycleFactory; +import org.apache.druid.sql.calcite.BaseCalciteQueryTest; +import org.apache.druid.sql.calcite.QueryTestBuilder; +import org.apache.druid.sql.calcite.QueryTestRunner; import org.apache.druid.sql.calcite.filtration.Filtration; import org.apache.druid.sql.calcite.planner.DruidOperatorTable; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.planner.PlannerFactory; -import org.apache.druid.sql.calcite.util.CalciteTestBase; -import org.apache.druid.sql.calcite.util.CalciteTests; -import org.apache.druid.sql.calcite.util.QueryLogHook; -import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; +import org.apache.druid.sql.calcite.util.*; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.LinearShardSpec; import org.junit.*; import org.junit.rules.TemporaryFolder; +import java.io.File; import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Map; +import java.util.*; -public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { - private static final String DATA_SOURCE = "foo"; - - private static QueryRunnerFactoryConglomerate conglomerate; - private static Closer resourceCloser; - private static AuthenticationResult authenticationResult = CalciteTests.REGULAR_USER_AUTH_RESULT; - private static final Map QUERY_CONTEXT_DEFAULT = ImmutableMap.of( - PlannerContext.CTX_SQL_QUERY_ID, "dummy" - ); - - @Rule - public TemporaryFolder temporaryFolder = new TemporaryFolder(); - - @Rule - public QueryLogHook queryLogHook = QueryLogHook.create(); - - private SpecificSegmentsQuerySegmentWalker walker; - private SqlLifecycleFactory sqlLifecycleFactory; - - @BeforeClass - public static void setUpClass() { - resourceCloser = Closer.create(); - conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(resourceCloser); +public class HdrHistogramQuantileSqlAggregatorTest extends BaseCalciteQueryTest { + @Override + public void gatherProperties(Properties properties) + { + super.gatherProperties(properties); } - @AfterClass - public static void tearDownClass() throws IOException { - resourceCloser.close(); + @Override + public void configureGuice(DruidInjectorBuilder builder) + { + super.configureGuice(builder); + 
builder.addModule(new HdrHistogramModule()); } public static final List ROWS1 = ImmutableList.of( - CalciteTests.createRow( + TestDataBuilder.createRow( ImmutableMap.builder() .put("t", "2000-01-01") .put("m1", "1") @@ -96,7 +82,7 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { .put("dim3", ImmutableList.of("a", "b")) .build() ), - CalciteTests.createRow( + TestDataBuilder.createRow( ImmutableMap.builder() .put("t", "2000-01-02") .put("m1", "2.0") @@ -106,7 +92,7 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { .put("dim3", ImmutableList.of("b", "c")) .build() ), - CalciteTests.createRow( + TestDataBuilder.createRow( ImmutableMap.builder() .put("t", "2000-01-03") .put("m1", "3.0") @@ -116,7 +102,7 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { .put("dim3", ImmutableList.of("d")) .build() ), - CalciteTests.createRow( + TestDataBuilder.createRow( ImmutableMap.builder() .put("t", "2001-01-01") .put("m1", "4.0") @@ -126,7 +112,7 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { .put("dim3", ImmutableList.of("")) .build() ), - CalciteTests.createRow( + TestDataBuilder.createRow( ImmutableMap.builder() .put("t", "2001-01-02") .put("m1", "5.0") @@ -136,7 +122,7 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { .put("dim3", ImmutableList.of()) .build() ), - CalciteTests.createRow( + TestDataBuilder.createRow( ImmutableMap.builder() .put("t", "2001-01-03") .put("m1", "6.0") @@ -146,15 +132,20 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { ) ); - @Before - public void setUp() throws Exception { + @SuppressWarnings("resource") + @Override + public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( + final QueryRunnerFactoryConglomerate conglomerate, + final JoinableFactoryWrapper joinableFactory, + final Injector injector + ) throws IOException{ HdrHistogramModule.registerSerde(); for (Module mod : new HdrHistogramModule().getJacksonModules()) { CalciteTests.getJsonMapper().registerModule(mod); TestHelper.JSON_MAPPER.registerModule(mod); } - - final QueryableIndex index = IndexBuilder.create() + final QueryableIndex index = TestHelper.getTestIndexIO().loadIndex(new File("D:/doc/datas/testIndex-6201298")); + /*final QueryableIndex index = IndexBuilder.create() .tmpDir(temporaryFolder.newFolder()) .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()) .schema( @@ -176,11 +167,11 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { ) //.rows(CalciteTests.ROWS1) .rows(ROWS1) - .buildMMappedIndex(); + .buildMMappedIndex();*/ - walker = new SpecificSegmentsQuerySegmentWalker(conglomerate).add( + return new SpecificSegmentsQuerySegmentWalker(conglomerate).add( DataSegment.builder() - .dataSource(DATA_SOURCE) + .dataSource(CalciteTests.DATASOURCE1) .interval(index.getDataInterval()) .version("1") .shardSpec(new LinearShardSpec(0)) @@ -188,69 +179,77 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { .build(), index ); - - final PlannerConfig plannerConfig = new PlannerConfig(); - final DruidOperatorTable operatorTable = new DruidOperatorTable( - ImmutableSet.of( - new HdrHistogramQuantileSqlAggregator(), - new HdrHistogramObjectSqlAggregator() - ), - ImmutableSet.of( - new HdrHistogramQuantilesOperatorConversion(), - new HdrHistogramPercentilesOperatorConversion() - ) - ); - SchemaPlus rootSchema = - 
CalciteTests.createMockRootSchema(conglomerate, walker, plannerConfig, AuthTestUtils.TEST_AUTHORIZER_MAPPER); - - sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory( - new PlannerFactory( - rootSchema, - CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate), - operatorTable, - CalciteTests.createExprMacroTable(), - plannerConfig, - AuthTestUtils.TEST_AUTHORIZER_MAPPER, - CalciteTests.getJsonMapper(), - CalciteTests.DRUID_SCHEMA_NAME - ) - ); - } - - @After - public void tearDown() throws Exception { - walker.close(); - walker = null; } @Test public void testSqlQuery() throws Exception { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); - String sql = "select * from druid.foo"; - final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); + String[] columns = new String[]{"__time", "dim1", "dim2", "dim3", "cnt", "hist_m1", "m1"}; + String sql = "select " + String.join(",", columns) + " from druid.foo"; + QueryTestBuilder builder = testBuilder().sql(sql); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; for (Object[] result : results) { - System.out.println(Arrays.toString(result)); + Map row = new LinkedHashMap(); + for (int i = 0; i < result.length; i++) { + row.put(columns[i], result[i]); + } + System.out.println(JSON.toJSONString(row)); + // System.out.println(Arrays.toString(result)); + } + + for (int i = 0; i < columns.length; i++) { + Object[] values = new Object[results.size()]; + for (int j = 0; j < results.size(); j++) { + values[j] = results.get(j)[i]; + } + System.out.println(columns[i] + ":" + Arrays.toString(values)); } } - @Test + @Test public void testGroup() throws Exception { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); String sql = "select cnt, APPROX_QUANTILE_HDR(hist_m1, 0.5, 1, 100, 2) from druid.foo group by cnt"; - final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); - for (Object[] result : results) { - System.out.println(Arrays.toString(result)); - } + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; + for (Object[] result : results) { + System.out.println(Arrays.toString(result)); + } } @Test public void testGroup2() throws Exception { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); String sql = "select HDR_HISTOGRAM(hist_m1) from druid.foo"; - final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; + for (Object[] result : results) { + System.out.println(Arrays.toString(result)); + } + } + + @Test + public void testGroup3() throws Exception { + String sql = "select APPROX_QUANTILE_HDR(h, 0.5) from(select HDR_HISTOGRAM(hist_m1) h from druid.foo) t"; + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; + for (Object[] result : results) { + System.out.println(Arrays.toString(result)); + } + } + + @Test + public void testGroup4() throws Exception { + String sql = 
"select hdr_get_quantiles(h, 0.1, 0.2, 0.3, 0.5, 0.9, 0.99, 1) from(select HDR_HISTOGRAM(hist_m1) h from druid.foo) t"; + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; for (Object[] result : results) { System.out.println(Arrays.toString(result)); } @@ -258,10 +257,11 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { @Test public void testSqlQueryGeneHdr() throws Exception { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); String sql = "select HDR_HISTOGRAM(hist_m1, 1, 100, 2), HDR_HISTOGRAM(cnt, 1, 100, 2) from druid.foo"; - final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; for (Object[] result : results) { System.out.println(Arrays.toString(result)); } @@ -269,11 +269,12 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { @Test public void testSqlQueryGeneHdr2() throws Exception { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); // HDR_HISTOGRAM(hist_m1, 1, 100, 2), String sql = "select HDR_GET_QUANTILES(HDR_HISTOGRAM(m1, 1, 100, 2), 0.1, 0.2, 0.3, 0.5, 0.9, 1) from druid.foo"; - final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; for (Object[] result : results) { System.out.println(Arrays.toString(result)); } @@ -281,44 +282,47 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { @Test public void testSqlQueryGeneHdrArgs() throws Exception { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); String sql = "select HDR_GET_QUANTILEs(HDR_HISTOGRAM(m1), 0.1, 0.2, 0.3, 0.5, 0.9, 1), " + "HDR_GET_QUANTILEs(HDR_HISTOGRAM(m1, 2), 0.1, 0.2, 0.3, 0.5, 0.9, 1) ,\n" + "HDR_GET_QUANTILEs(HDR_HISTOGRAM(m1, 1, 110, 2), 0.1, 0.2, 0.3, 0.5, 0.9, 1) ,\n" + "HDR_GET_QUANTILEs(HDR_HISTOGRAM(m1, 1, 110, 2, false), 0.1, 0.2, 0.3, 0.5, 0.9, 1) \n" + "from druid.foo"; - final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; for (Object[] result : results) { System.out.println(Arrays.toString(result)); } } - @Test + @Test public void testSqlQueryGeneHdrArgs2() throws Exception { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); String sql = "select APPROX_QUANTILE_HDR(m1, 0.1), " + "APPROX_QUANTILE_HDR(m1, 0.1, 2) ,\n" + "APPROX_QUANTILE_HDR(m1, 0.1, 1, 110, 2) ,\n" + "APPROX_QUANTILE_HDR(m1, 0.1, 1, 110, 2, false)\n" + "from druid.foo"; - final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); - for (Object[] result : results) { - System.out.println(Arrays.toString(result)); - } + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder.run(); + QueryTestRunner.QueryResults 
queryResults = builder.results(); + List results = queryResults.results; + for (Object[] result : results) { + System.out.println(Arrays.toString(result)); + } } @Test public void testSqlQueryGeneHdr3() throws Exception { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); // 函数不区分大小写 // HDR_HISTOGRAM(hist_m1, 1, 100, 2), //String sql = "select HDR_GET_PERCENTILES(HDR_HISTOGRAM(m1, 1, 100, 2)) from druid.foo"; //String sql = "select hdr_get_percentiles(hdr_histogram(m1, 1, 100, 2)) from druid.foo"; String sql = "select hdr_get_percentiles(hdr_histogram(hist_m1, 1, 100, 2)) from druid.foo"; - final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; for (Object[] result : results) { System.out.println(Arrays.toString(result)); } @@ -326,7 +330,6 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { @Test public void testSqlQueryQuantiles() throws Exception { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); String sql = "SELECT\n" + "APPROX_QUANTILE_HDR(m1, 0.01, 1, 100, 2),\n" + "APPROX_QUANTILE_HDR(m1, 0.5, 1, 100, 2),\n" @@ -338,9 +341,10 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { + "APPROX_QUANTILE_HDR(m1, 0.999, 1, 100, 2) FILTER(WHERE dim1 = 'abc'),\n" + "APPROX_QUANTILE_HDR(cnt, 0.5, 1, 100, 2)\n" + "FROM foo"; - final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); - System.out.println(sql); + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; for (Object[] result : results) { System.out.println(Arrays.toString(result)); } @@ -348,7 +352,6 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { @Test public void testSqlQueryQuantilesOnComplexColumn() throws Exception { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); String sql = "SELECT\n" + "APPROX_QUANTILE_HDR(hist_m1, 0.01, 1, 100, 2),\n" + "APPROX_QUANTILE_HDR(hist_m1, 0.5, 1, 100, 2),\n" @@ -358,9 +361,10 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { + "APPROX_QUANTILE_HDR(hist_m1, 0.999, 1, 100, 2) FILTER(WHERE dim1 <> 'abc'),\n" + "APPROX_QUANTILE_HDR(hist_m1, 0.999, 1, 100, 2) FILTER(WHERE dim1 = 'abc')\n" + "FROM foo"; - final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); - System.out.println(sql); + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; for (Object[] result : results) { System.out.println(Arrays.toString(result)); } @@ -373,7 +377,6 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { @Test public void testQuantileOnFloatAndLongs() throws Exception { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); String sql = "SELECT\n" + "APPROX_QUANTILE_HDR(m1, 0.01, 1, 100, 2),\n" + "APPROX_QUANTILE_HDR(m1, 0.5, 1, 100, 2),\n" @@ -385,60 +388,55 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { + "APPROX_QUANTILE_HDR(m1, 0.999, 1, 
100, 2) FILTER(WHERE dim1 = 'abc'),\n" + "APPROX_QUANTILE_HDR(cnt, 0.5, 1, 100, 2)\n" + "FROM foo"; - final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); - System.out.println(sql); + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder = builder.expectedQueries(Collections.singletonList(Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) + .granularity(Granularities.ALL) + .virtualColumns( + new ExpressionVirtualColumn( + "v0", + "(\"m1\" * 2)", + ColumnType.LONG, + TestExprMacroTable.INSTANCE + ) + ) + .aggregators(ImmutableList.of( + new HdrHistogramAggregatorFactory("a0:agg", "m1", 1L, 100L, 2, true), + new HdrHistogramAggregatorFactory("a4:agg", "v0", 1L, 100L, 2, true), + new FilteredAggregatorFactory( + new HdrHistogramAggregatorFactory("a5:agg", "m1", 1L, 100L, 2, true), + new SelectorDimFilter("dim1", "abc", null) + ), + new FilteredAggregatorFactory( + new HdrHistogramAggregatorFactory("a6:agg", "m1", 1L, 100L, 2, true), + new NotDimFilter(new SelectorDimFilter("dim1", "abc", null)) + ), + new HdrHistogramAggregatorFactory("a8:agg", "cnt", 1L, 100L, 2, true) + )) + .postAggregators( + new HdrHistogramToQuantilePostAggregator("a0", "a0:agg", 0.01f), + new HdrHistogramToQuantilePostAggregator("a1", "a0:agg", 0.50f), + new HdrHistogramToQuantilePostAggregator("a2", "a0:agg", 0.98f), + new HdrHistogramToQuantilePostAggregator("a3", "a0:agg", 0.99f), + new HdrHistogramToQuantilePostAggregator("a4", "a4:agg", 0.97f), + new HdrHistogramToQuantilePostAggregator("a5", "a5:agg", 0.99f), + new HdrHistogramToQuantilePostAggregator("a6", "a6:agg", 0.999f), + new HdrHistogramToQuantilePostAggregator("a7", "a5:agg", 0.999f), + new HdrHistogramToQuantilePostAggregator("a8", "a8:agg", 0.50f) + ) + .context(QUERY_CONTEXT_DEFAULT) + .build())); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; for (Object[] result : results) { System.out.println(Arrays.toString(result)); } - - // Verify query - Assert.assertEquals( - Druids.newTimeseriesQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) - .granularity(Granularities.ALL) - .virtualColumns( - new ExpressionVirtualColumn( - "v0", - "(\"m1\" * 2)", - ValueType.LONG, - TestExprMacroTable.INSTANCE - ) - ) - .aggregators(ImmutableList.of( - new HdrHistogramAggregatorFactory("a0:agg", "m1", 1L, 100L, 2, true), - new HdrHistogramAggregatorFactory("a4:agg", "v0", 1L, 100L, 2, true), - new FilteredAggregatorFactory( - new HdrHistogramAggregatorFactory("a5:agg", "m1", 1L, 100L, 2, true), - new SelectorDimFilter("dim1", "abc", null) - ), - new FilteredAggregatorFactory( - new HdrHistogramAggregatorFactory("a6:agg", "m1", 1L, 100L, 2, true), - new NotDimFilter(new SelectorDimFilter("dim1", "abc", null)) - ), - new HdrHistogramAggregatorFactory("a8:agg", "cnt", 1L, 100L, 2, true) - )) - .postAggregators( - new HdrHistogramToQuantilePostAggregator("a0", "a0:agg", 0.01f), - new HdrHistogramToQuantilePostAggregator("a1", "a0:agg", 0.50f), - new HdrHistogramToQuantilePostAggregator("a2", "a0:agg", 0.98f), - new HdrHistogramToQuantilePostAggregator("a3", "a0:agg", 0.99f), - new HdrHistogramToQuantilePostAggregator("a4", "a4:agg", 0.97f), - new HdrHistogramToQuantilePostAggregator("a5", "a5:agg", 0.99f), - 
new HdrHistogramToQuantilePostAggregator("a6", "a6:agg", 0.999f), - new HdrHistogramToQuantilePostAggregator("a7", "a5:agg", 0.999f), - new HdrHistogramToQuantilePostAggregator("a8", "a8:agg", 0.50f) - ) - .context(ImmutableMap.of("skipEmptyBuckets", true, PlannerContext.CTX_SQL_QUERY_ID, "dummy")) - .build(), - Iterables.getOnlyElement(queryLogHook.getRecordedQueries()) - ); } @Test public void testQuantileOnComplexColumn() throws Exception{ - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); String sql = "SELECT\n" + "APPROX_QUANTILE_HDR(hist_m1, 0.01, 1, 100, 2),\n" + "APPROX_QUANTILE_HDR(hist_m1, 0.5, 1, 100, 2),\n" @@ -448,43 +446,42 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase { + "APPROX_QUANTILE_HDR(hist_m1, 0.999, 1, 100, 2) FILTER(WHERE dim1 <> 'abc'),\n" + "APPROX_QUANTILE_HDR(hist_m1, 0.999, 1, 100, 2) FILTER(WHERE dim1 = 'abc')\n" + "FROM foo"; - final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); - System.out.println(sql); + QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize(); + builder = builder.expectedQueries(Collections.singletonList(Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) + .granularity(Granularities.ALL) + .aggregators(ImmutableList.of( + new HdrHistogramMergeAggregatorFactory("a0:agg", "hist_m1", 1L, 100L, 2, true), + new FilteredAggregatorFactory( + new HdrHistogramMergeAggregatorFactory("a4:agg", "hist_m1", 1L, 100L, 2, true), + new SelectorDimFilter("dim1", "abc", null) + ), + new FilteredAggregatorFactory( + new HdrHistogramMergeAggregatorFactory("a5:agg", "hist_m1", 1L, 100L, 2, true), + new NotDimFilter(new SelectorDimFilter("dim1", "abc", null)) + ) + )) + .postAggregators( + new HdrHistogramToQuantilePostAggregator("a0", "a0:agg", 0.01f), + new HdrHistogramToQuantilePostAggregator("a1", "a0:agg", 0.50f), + new HdrHistogramToQuantilePostAggregator("a2", "a0:agg", 0.98f), + new HdrHistogramToQuantilePostAggregator("a3", "a0:agg", 0.99f), + new HdrHistogramToQuantilePostAggregator("a4", "a4:agg", 0.99f), + new HdrHistogramToQuantilePostAggregator("a5", "a5:agg", 0.999f), + new HdrHistogramToQuantilePostAggregator("a6", "a4:agg", 0.999f) + ) + .context(QUERY_CONTEXT_DEFAULT) + .build())); + builder.run(); + QueryTestRunner.QueryResults queryResults = builder.results(); + List results = queryResults.results; for (Object[] result : results) { System.out.println(Arrays.toString(result)); } - // Verify query - Assert.assertEquals( - Druids.newTimeseriesQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) - .granularity(Granularities.ALL) - .aggregators(ImmutableList.of( - new HdrHistogramMergeAggregatorFactory("a0:agg", "hist_m1", 1L, 100L, 2, true), - new FilteredAggregatorFactory( - new HdrHistogramMergeAggregatorFactory("a4:agg", "hist_m1", 1L, 100L, 2, true), - new SelectorDimFilter("dim1", "abc", null) - ), - new FilteredAggregatorFactory( - new HdrHistogramMergeAggregatorFactory("a5:agg", "hist_m1", 1L, 100L, 2, true), - new NotDimFilter(new SelectorDimFilter("dim1", "abc", null)) - ) - )) - .postAggregators( - new HdrHistogramToQuantilePostAggregator("a0", "a0:agg", 0.01f), - new HdrHistogramToQuantilePostAggregator("a1", "a0:agg", 0.50f), - new HdrHistogramToQuantilePostAggregator("a2", "a0:agg", 0.98f), - new 
HdrHistogramToQuantilePostAggregator("a3", "a0:agg", 0.99f), - new HdrHistogramToQuantilePostAggregator("a4", "a4:agg", 0.99f), - new HdrHistogramToQuantilePostAggregator("a5", "a5:agg", 0.999f), - new HdrHistogramToQuantilePostAggregator("a6", "a4:agg", 0.999f) - ) - .context(ImmutableMap.of("skipEmptyBuckets", true, PlannerContext.CTX_SQL_QUERY_ID, "dummy")) - .build(), - Iterables.getOnlyElement(queryLogHook.getRecordedQueries()) - ); + + } private static PostAggregator makeFieldAccessPostAgg(String name) { diff --git a/druid-hlld/src/main/java/org/apache/druid/query/aggregation/sketch/hlld/HllAggregatorFactory.java b/druid-hlld/src/main/java/org/apache/druid/query/aggregation/sketch/hlld/HllAggregatorFactory.java index 6b6dd08..a5d0e0d 100644 --- a/druid-hlld/src/main/java/org/apache/druid/query/aggregation/sketch/hlld/HllAggregatorFactory.java +++ b/druid-hlld/src/main/java/org/apache/druid/query/aggregation/sketch/hlld/HllAggregatorFactory.java @@ -170,7 +170,8 @@ public class HllAggregatorFactory extends AggregatorFactory { @Override public ColumnType getResultType() { - return round ? ColumnType.LONG : ColumnType.DOUBLE; + //return round ? ColumnType.LONG : ColumnType.DOUBLE; + return getIntermediateType(); } @Nullable