优化:getMaxIntermediateSize 的返回值在构造时计算一次并缓存;此前每处理一行数据都会调用一次 getMaxIntermediateSize 并重复计算该值

This commit is contained in:
lifengchao
2024-01-31 17:34:24 +08:00
parent eb64880203
commit 00db131a55
8 changed files with 1625 additions and 1567 deletions

View File

@@ -31,6 +31,7 @@ public class HdrHistogramAggregatorFactory extends AggregatorFactory {
protected final long highestTrackableValue;
protected final int numberOfSignificantValueDigits;
protected final boolean autoResize; //默认是false
protected final int updatableSerializationBytes;
public HdrHistogramAggregatorFactory(
@JsonProperty("name") String name,
@@ -80,6 +81,7 @@ public class HdrHistogramAggregatorFactory extends AggregatorFactory {
this.highestTrackableValue = highestTrackableValue;
this.numberOfSignificantValueDigits = numberOfSignificantValueDigits;
this.autoResize = autoResize;
this.updatableSerializationBytes = getUpdatableSerializationBytes();
}
@Override
@@ -284,6 +286,10 @@ public class HdrHistogramAggregatorFactory extends AggregatorFactory {
@Override
public int getMaxIntermediateSize() {
    // This is invoked once per row during aggregation, so prefer the value
    // cached by the constructor. 0 is treated as "not yet computed" and
    // falls back to recomputing on demand.
    return updatableSerializationBytes != 0
            ? updatableSerializationBytes
            : getUpdatableSerializationBytes();
}
private int getUpdatableSerializationBytes(){
if(!autoResize){
/*Histogram histogram = new Histogram(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits);
histogram.setAutoResize(autoResize);

View File

@@ -56,6 +56,9 @@ public class HdrHistogramToPercentilesPostAggregator implements PostAggregator {
@Override
public Object compute(Map<String, Object> values) {
    // Missing sketch (e.g. no rows aggregated) renders as an empty JSON array.
    final HistogramSketch histogram = (HistogramSketch) values.get(fieldName);
    if (histogram == null) {
        return "[]";
    }
    final List<Percentile> percentiles =
            histogram.percentileList(percentileTicksPerHalfDistance);
    return HdrHistogramModule.toJson(percentiles);
}

View File

@@ -62,6 +62,9 @@ public class HdrHistogramToQuantilePostAggregator implements PostAggregator {
@Override
public Object compute(Map<String, Object> values) {
    // No sketch for this field (e.g. empty result set) -> no quantile.
    final Object raw = values.get(fieldName);
    if (raw == null) {
        return null;
    }
    // probability is a fraction in [0, 1]; the sketch API expects a percentile.
    return ((HistogramSketch) raw).getValueAtPercentile(probability * 100);
}

View File

@@ -56,7 +56,11 @@ public class HdrHistogramToQuantilesPostAggregator implements PostAggregator {
@Override
public Object compute(Map<String, Object> values) {
HistogramSketch histogram = (HistogramSketch) values.get(fieldName);
final long[] counts = new long[probabilitys.length];
if(histogram == null){
//return null;
return new Long[probabilitys.length];
}
final Long[] counts = new Long[probabilitys.length];
for (int i = 0; i < probabilitys.length; i++) {
counts[i] = histogram.getValueAtPercentile(probabilitys[i] * 100);
}

View File

@@ -28,6 +28,7 @@ public class HllAggregatorFactory extends AggregatorFactory {
protected final String fieldName;
protected final int precision;
protected final boolean round;
protected final int updatableSerializationBytes;
public HllAggregatorFactory(
@JsonProperty("name") final String name,
@@ -45,6 +46,7 @@ public class HllAggregatorFactory extends AggregatorFactory {
this.fieldName = fieldName;
this.precision = precision == null ? DEFAULT_PRECISION : precision;
this.round = round == null ? DEFAULT_ROUND : round;
this.updatableSerializationBytes = getUpdatableSerializationBytes();
}
@Override
@@ -236,6 +238,10 @@ public class HllAggregatorFactory extends AggregatorFactory {
@Override
public int getMaxIntermediateSize() {
    // Avoid recomputing per row: the constructor caches the serialization
    // size. A cached value of 0 means "uninitialized", so recompute then.
    if (updatableSerializationBytes == 0) {
        return getUpdatableSerializationBytes();
    }
    return updatableSerializationBytes;
}
// Size in bytes needed to hold an updatable HLL sketch at the configured
// precision. Computed once in the constructor and cached so that
// getMaxIntermediateSize() does not recompute it for every row.
// NOTE(review): this is called from the constructor and is overridable —
// subclasses overriding it must only depend on fields already initialized
// in the superclass constructor (here: precision).
protected int getUpdatableSerializationBytes(){
return Hll.getUpdatableSerializationBytes(precision);
}

View File

@@ -67,7 +67,7 @@ public class HllMergeAggregatorFactory extends HllAggregatorFactory{
}
@Override
public int getMaxIntermediateSize() {
protected int getUpdatableSerializationBytes() {
return HllUnion.getUpdatableSerializationBytes(precision);
}
}

View File

@@ -66,6 +66,9 @@ public class HllToEstimatePostAggregator implements PostAggregator {
@Override
public Object compute(final Map<String, Object> combinedAggregators) {
    // Missing sketch -> estimate of zero. Both ternaries intentionally keep
    // long/double arms so the result is numerically promoted the same way
    // in either branch.
    final Hll sketch = (Hll) field.compute(combinedAggregators);
    if (sketch == null) {
        return round ? 0L : 0D;
    }
    final double estimate = sketch.size();
    return round ? Math.round(estimate) : estimate;
}

View File

@@ -95,6 +95,7 @@ public class HllApproxCountDistinctSqlAggregatorTest extends BaseCalciteQueryTes
public void testSqlQuery() throws Exception {
// Can't vectorize due to SUBSTRING expression.
cannotVectorize();
String[] columns = new String[]{"__time", "dim1", "dim2", "dim3", "cnt", "hll_dim1", "m1"};
String sql = "select " + String.join(",", columns) + " from druid.foo";
@@ -120,6 +121,38 @@ public class HllApproxCountDistinctSqlAggregatorTest extends BaseCalciteQueryTes
}
}
@Test
public void testSqlQuery11() throws Exception {
    // Exercises the HLLD aggregator over a LIMITed subquery. Vectorization
    // is skipped via skipVectorize() on the builder (cannotVectorize() is
    // deliberately not used here).
    String sql = "select HLLD(hll_dim1) hll_dim1 from (select hll_dim1 from druid.foo limit 5) t ";
    //sql = "select HLLD(hll_dim1) hll_dim1 from druid.foo t ";
    // Fixed: removed stray empty statement (double semicolon).
    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
    builder.run();
    QueryTestRunner.QueryResults queryResults = builder.results();
    List<Object[]> results = queryResults.results;
    for (Object[] result : results) {
        System.out.println(Arrays.toString(result));
    }
}
@Test
public void testSqlQuery12() throws Exception {
    // Scan over a LIMITed subquery with a __time range filter; vectorization
    // is disabled for this case.
    cannotVectorize();
    final String sql = "select * from (select * from druid.foo limit 6) t where __time >= '1970-12-15 07:00:28' and __time < '2023-12-15 08:10:28' ";
    final QueryTestBuilder builder = testBuilder().sql(sql);
    builder.run();
    final QueryTestRunner.QueryResults queryResults = builder.results();
    for (final Object[] row : queryResults.results) {
        System.out.println(Arrays.toString(row));
    }
}
@Test
public void testSqlQuery1() throws Exception {
// Can't vectorize due to SUBSTRING expression.