Compare commits: druid_0.18...develop (12 commits)

5765edf671
b754e83ba0
8c546e20d7
9a6c44112e
38c22db84d
25ab1b3f9d
95e6e07ed9
00db131a55
eb64880203
14d06ca0bc
ce13bd16de
26bb13fd74
pom.xml

@@ -5,7 +5,7 @@
   <modelVersion>4.0.0</modelVersion>

   <groupId>org.apache.druid.extensions</groupId>
-  <artifactId>druid-hdrhistogram_0.18.1</artifactId>
+  <artifactId>druid-hdrhistogram_26.0.0</artifactId>
   <name>druid-hdrhistogram</name>
   <version>1.0-SNAPSHOT</version>

@@ -14,7 +14,7 @@
   <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
   <maven.compiler.source>1.8</maven.compiler.source>
   <maven.compiler.target>1.8</maven.compiler.target>
-  <druid.version>0.18.1</druid.version>
+  <druid.version>26.0.0</druid.version>
 </properties>

 <dependencies>

@@ -45,6 +45,13 @@
   </dependency>

   <!-- Tests -->
+  <dependency>
+    <groupId>org.easymock</groupId>
+    <artifactId>easymock</artifactId>
+    <version>4.3</version>
+    <scope>test</scope>
+  </dependency>
+
   <dependency>
     <groupId>org.apache.druid</groupId>
     <artifactId>druid-processing</artifactId>

@@ -54,9 +61,17 @@
   </dependency>
   <dependency>
     <groupId>org.apache.druid</groupId>
-    <artifactId>druid-benchmarks</artifactId>
+    <artifactId>druid-server</artifactId>
     <version>${druid.version}</version>
     <scope>test</scope>
+    <type>test-jar</type>
+  </dependency>
+  <dependency>
+    <groupId>org.apache.druid</groupId>
+    <artifactId>druid-sql</artifactId>
+    <version>${druid.version}</version>
+    <type>test-jar</type>
+    <scope>test</scope>
   </dependency>
   <dependency>
     <groupId>junit</groupId>
ArrayHistogram.java

@@ -8,9 +8,7 @@ package org.HdrHistogram;
 import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 import java.util.zip.DataFormatException;

 /**

@@ -305,6 +303,35 @@ public class ArrayHistogram extends AbstractHistogram implements Histogramer {
     return percentiles;
   }

+  @Override
+  public Map<String, Object> describe() {
+    long min = getMinValue();
+    long max = getMaxValue(); // max = this.maxValue;
+    long count = getTotalCount();
+    double mean = getMean();
+    long sum = (long) (mean * count);
+    mean = Math.round(mean * 100.0) / 100.0;
+    long p25 = getValueAtPercentile(25);
+    long p50 = getValueAtPercentile(50);
+    long p75 = getValueAtPercentile(75);
+    long p90 = getValueAtPercentile(90);
+    long p95 = getValueAtPercentile(95);
+    long p99 = getValueAtPercentile(99);
+    Map<String, Object> rst = new LinkedHashMap<>();
+    rst.put("count", count);
+    rst.put("mean", mean);
+    rst.put("sum", sum);
+    rst.put("min", min);
+    rst.put("p25", p25);
+    rst.put("p50", p50);
+    rst.put("p75", p75);
+    rst.put("p90", p90);
+    rst.put("p95", p95);
+    rst.put("p99", p99);
+    rst.put("max", max);
+    return rst;
+  }
+
   @Override
   public Histogramer resetHistogram() {
     if (isAutoResize()) {
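For orientation, here is a self-contained sketch of the summary the new describe() method computes, written against the upstream org.HdrHistogram.Histogram API (recordValue, getMean, and getValueAtPercentile are stock HdrHistogram methods that ArrayHistogram inherits from AbstractHistogram); the value range and sample data are made up:

import org.HdrHistogram.Histogram;

import java.util.LinkedHashMap;
import java.util.Map;

public class DescribeSketchDemo {
  public static void main(String[] args) {
    // Track values 1..3,600,000,000 with 3 significant digits.
    Histogram h = new Histogram(1L, 3_600_000_000L, 3);
    for (long v = 1; v <= 1000; v++) {
      h.recordValue(v);
    }
    // Rebuild the same map describe() above assembles, key for key.
    Map<String, Object> rst = new LinkedHashMap<>();
    long count = h.getTotalCount();
    double mean = h.getMean();
    rst.put("count", count);
    rst.put("mean", Math.round(mean * 100.0) / 100.0); // rounded to 2 decimals
    rst.put("sum", (long) (mean * count));             // sum reconstructed from the mean
    rst.put("min", h.getMinValue());
    rst.put("p50", h.getValueAtPercentile(50));
    rst.put("p99", h.getValueAtPercentile(99));
    rst.put("max", h.getMaxValue());
    System.out.println(rst); // e.g. {count=1000, mean=500.5, sum=500500, min=1, ...}
  }
}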
DirectArrayHistogram.java

@@ -2,7 +2,9 @@ package org.HdrHistogram;

 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;

 public class DirectArrayHistogram extends AbstractHistogram implements Histogramer {
   long totalCount;

@@ -172,6 +174,35 @@ public class DirectArrayHistogram extends AbstractHistogram implements Histogramer {
     return percentiles;
   }

+  @Override
+  public Map<String, Object> describe() {
+    long min = getMinValue();
+    long max = getMaxValue(); // max = this.maxValue;
+    long count = getTotalCount();
+    double mean = getMean();
+    long sum = (long) (mean * count);
+    mean = Math.round(mean * 100.0) / 100.0;
+    long p25 = getValueAtPercentile(25);
+    long p50 = getValueAtPercentile(50);
+    long p75 = getValueAtPercentile(75);
+    long p90 = getValueAtPercentile(90);
+    long p95 = getValueAtPercentile(95);
+    long p99 = getValueAtPercentile(99);
+    Map<String, Object> rst = new LinkedHashMap<>();
+    rst.put("count", count);
+    rst.put("mean", mean);
+    rst.put("sum", sum);
+    rst.put("min", min);
+    rst.put("p25", p25);
+    rst.put("p50", p50);
+    rst.put("p75", p75);
+    rst.put("p90", p90);
+    rst.put("p95", p95);
+    rst.put("p99", p99);
+    rst.put("max", max);
+    return rst;
+  }
+
   @Override
   public Histogramer resetHistogram() {
     throw new UnsupportedOperationException("unsupported method");
DirectMapHistogram.java

@@ -3,6 +3,7 @@ package org.HdrHistogram;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.util.List;
+import java.util.Map;
 import java.util.zip.DataFormatException;
 import java.util.zip.Inflater;


@@ -446,6 +447,11 @@ public class DirectMapHistogram implements Histogramer {
     throw new UnsupportedOperationException("unsupported method");
   }

+  @Override
+  public Map<String, Object> describe() {
+    throw new UnsupportedOperationException("unsupported method");
+  }
+
   @Override
   public Histogramer resetHistogram() {
     throw new UnsupportedOperationException("unsupported method");
HistogramSketch.java

@@ -2,6 +2,7 @@ package org.HdrHistogram;

 import java.nio.ByteBuffer;
 import java.util.List;
+import java.util.Map;

 public class HistogramSketch {
   public Histogramer hisImpl = null;

@@ -59,6 +60,10 @@ public class HistogramSketch {
     return hisImpl.percentileList(percentileTicksPerHalfDistance);
   }

+  public Map<String, Object> describe(){
+    return hisImpl.describe();
+  }
+
   public static final int getUpdatableSerializationBytes(long lowestDiscernibleValue, long highestTrackableValue, int numberOfSignificantValueDigits){
     return DirectArrayHistogram.getUpdatableSerializationBytes(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits);
   }
Histogramer.java

@@ -2,6 +2,7 @@ package org.HdrHistogram;

 import java.nio.ByteBuffer;
 import java.util.List;
+import java.util.Map;

 public interface Histogramer {
   long getTotalCount();

@@ -14,6 +15,8 @@ public interface Histogramer {

   List<Percentile> percentileList(int percentileTicksPerHalfDistance);

+  Map<String, Object> describe();
+
   Histogramer resetHistogram();

   Histogramer merge(Histogramer histogram);
Percentile.java

@@ -38,4 +38,13 @@ public class Percentile {
   public void setPercentile(double percentile) {
     this.percentile = percentile;
   }
+
+  @Override
+  public String toString() {
+    return "Percentile{" +
+        "value=" + value +
+        ", count=" + count +
+        ", percentile=" + percentile +
+        '}';
+  }
 }
HdrHistogramAggregatorFactory.java

@@ -1,8 +1,6 @@
 package org.apache.druid.query.aggregation.sketch.HdrHistogram;

 import com.fasterxml.jackson.annotation.JsonProperty;
-import org.HdrHistogram.DirectHistogram;
-import org.HdrHistogram.Histogram;
 import org.HdrHistogram.HistogramSketch;
 import org.HdrHistogram.HistogramUnion;
 import org.apache.druid.java.util.common.IAE;

@@ -10,6 +8,7 @@ import org.apache.druid.query.aggregation.*;
 import org.apache.druid.query.cache.CacheKeyBuilder;
 import org.apache.druid.segment.ColumnSelectorFactory;
 import org.apache.druid.segment.ColumnValueSelector;
+import org.apache.druid.segment.column.ColumnType;

 import javax.annotation.Nullable;
 import java.util.Collections;

@@ -20,9 +19,9 @@ import java.util.Objects;
 public class HdrHistogramAggregatorFactory extends AggregatorFactory {
   public static final long DEFAULT_LOWEST = 1;
   public static final long DEFAULT_HIGHEST = 2;
-  public static final int DEFAULT_SIGNIFICANT = 3;
+  public static final int DEFAULT_SIGNIFICANT = 1;
   public static final boolean DEFAULT_AUTO_RESIZE = true;
-  public static final long BUFFER_AUTO_RESIZE_HIGHEST = 100000000L * 1000000L;
+  public static final long BUFFER_AUTO_RESIZE_HIGHEST = 100000000L * 100L;
   public static final Comparator<HistogramSketch> COMPARATOR =
       Comparator.nullsFirst(Comparator.comparingLong(HistogramSketch::getTotalCount));

@@ -32,6 +31,7 @@ public class HdrHistogramAggregatorFactory extends AggregatorFactory {
   protected final long highestTrackableValue;
   protected final int numberOfSignificantValueDigits;
   protected final boolean autoResize; // defaults to false
+  protected final int updatableSerializationBytes;

   public HdrHistogramAggregatorFactory(
       @JsonProperty("name") String name,

@@ -81,6 +81,7 @@ public class HdrHistogramAggregatorFactory extends AggregatorFactory {
     this.highestTrackableValue = highestTrackableValue;
     this.numberOfSignificantValueDigits = numberOfSignificantValueDigits;
     this.autoResize = autoResize;
+    this.updatableSerializationBytes = getUpdatableSerializationBytes();
   }

   @Override

@@ -208,15 +209,30 @@ public class HdrHistogramAggregatorFactory extends AggregatorFactory {
     );
   }

+  @Override
+  public AggregatorFactory withName(String newName) {
+    return new HdrHistogramAggregatorFactory(newName, fieldName, lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits, autoResize);
+  }
+
   @Override
   public Object deserialize(Object object) {
+    if (object == null) {
+      return null;
+    }
     return HistogramUtils.deserializeHistogram(object);
   }

+  @Override
+  public ColumnType getResultType() {
+    //return ColumnType.LONG;
+    return getIntermediateType();
+  }
+
   @Nullable
   @Override
   public Object finalizeComputation(@Nullable Object object) {
-    return object == null ? null : ((HistogramSketch) object).getTotalCount();
+    //return object == null ? null : ((HistogramSketch) object).getTotalCount();
+    return object;
   }

   @Override

@@ -250,9 +266,16 @@ public class HdrHistogramAggregatorFactory extends AggregatorFactory {
     return autoResize;
   }

+  /*
+  This method no longer exists; the new version must implement getIntermediateType instead.
   @Override
   public String getTypeName() {
     return HdrHistogramModule.HDRHISTOGRAM_TYPE_NAME;
+  }*/
+
+  @Override
+  public ColumnType getIntermediateType() {
+    return HdrHistogramModule.TYPE;
   }

   @Override

@@ -263,6 +286,10 @@ public class HdrHistogramAggregatorFactory extends AggregatorFactory {

   @Override
   public int getMaxIntermediateSize() {
+    return updatableSerializationBytes == 0 ? getUpdatableSerializationBytes() : updatableSerializationBytes;
+  }
+
+  private int getUpdatableSerializationBytes(){
     if (!autoResize) {
       /*Histogram histogram = new Histogram(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits);
       histogram.setAutoResize(autoResize);
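One behavioral note on the hunk above: the per-slot buffer size is now computed once in the constructor and memoized, instead of on every getMaxIntermediateSize() call. A minimal sketch of that memoization pattern under the same names (the size computation is stubbed here; the real code sizes the histogram's serialized form):

public class SizeCacheSketch {
  private final int updatableSerializationBytes;

  public SizeCacheSketch() {
    // Computed once at construction; buffer aggregators are created frequently,
    // so recomputing the size on every call would be wasted work.
    this.updatableSerializationBytes = computeSerializationBytes();
  }

  public int getMaxIntermediateSize() {
    // Fall back to recomputing only if the cached value is unset (0).
    return updatableSerializationBytes == 0 ? computeSerializationBytes() : updatableSerializationBytes;
  }

  private int computeSerializationBytes() {
    return 512; // stub value for illustration only
  }
}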
HdrHistogramMergeAggregatorFactory.java

@@ -1,9 +1,9 @@
 package org.apache.druid.query.aggregation.sketch.HdrHistogram;

 import com.fasterxml.jackson.annotation.JsonProperty;
-import org.HdrHistogram.Histogram;
 import org.HdrHistogram.HistogramSketch;
 import org.apache.druid.query.aggregation.Aggregator;
+import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.BufferAggregator;
 import org.apache.druid.query.cache.CacheKeyBuilder;
 import org.apache.druid.segment.ColumnSelectorFactory;

@@ -48,6 +48,11 @@ public class HdrHistogramMergeAggregatorFactory extends HdrHistogramAggregatorFactory {
     );
   }

+  @Override
+  public AggregatorFactory withName(String newName) {
+    return new HdrHistogramMergeAggregatorFactory(newName, fieldName, lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits, autoResize);
+  }
+
   @Override
   public byte[] getCacheKey() {
     return new CacheKeyBuilder(HdrHistogramModule.CACHE_TYPE_ID_OFFSET).appendByte(HdrHistogramModule.QUANTILES_HDRHISTOGRAM_MERGE_CACHE_TYPE_ID)
HdrHistogramMergeBufferAggregator.java

@@ -37,7 +37,7 @@ public class HdrHistogramMergeBufferAggregator implements BufferAggregator {
     this.numberOfSignificantValueDigits = numberOfSignificantValueDigits;
     this.autoResize = autoResize;
     this.size = size;
-    LOG.error("HdrHistogramMergeBufferAggregator gene:" + Thread.currentThread().getName() + "-" + Thread.currentThread().getId());
+    //LOG.error("HdrHistogramMergeBufferAggregator gene:" + Thread.currentThread().getName() + "-" + Thread.currentThread().getId());
   }

   @Override

@@ -83,7 +83,7 @@ public class HdrHistogramMergeBufferAggregator implements BufferAggregator {
   @Nullable
   @Override
   public synchronized HistogramSketch get(ByteBuffer buf, int position) {
-    LOG.error("HdrHistogramMergeBufferAggregator get:" + 0 + "-" + Thread.currentThread().getId() + "-" + this);
+    //LOG.error("HdrHistogramMergeBufferAggregator get:" + 0 + "-" + Thread.currentThread().getId() + "-" + this);
     HistogramUnion union = histograms.get(buf).get(position);
     //return histogram.copy();
     return union.getResult().copy();
HdrHistogramModule.java

@@ -7,13 +7,10 @@ import com.fasterxml.jackson.databind.jsontype.NamedType;
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.inject.Binder;
-import org.HdrHistogram.Histogram;
 import org.HdrHistogram.HistogramSketch;
 import org.apache.druid.initialization.DruidModule;
-import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.HdrHistogramObjectSqlAggregator;
-import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.HdrHistogramPercentilesOperatorConversion;
-import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.HdrHistogramQuantileSqlAggregator;
-import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.HdrHistogramQuantilesOperatorConversion;
+import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.*;
+import org.apache.druid.segment.column.ColumnType;
 import org.apache.druid.segment.serde.ComplexMetrics;
 import org.apache.druid.sql.guice.SqlBindings;

@@ -27,8 +24,11 @@ public class HdrHistogramModule implements DruidModule {
   public static final byte QUANTILES_HDRHISTOGRAM_TO_QUANTILE_CACHE_TYPE_ID = 0x03;
   public static final byte QUANTILES_HDRHISTOGRAM_TO_QUANTILES_CACHE_TYPE_ID = 0x04;
   public static final byte QUANTILES_HDRHISTOGRAM_TO_PERCENTILES_CACHE_TYPE_ID = 0x05;
+  public static final byte QUANTILES_HDRHISTOGRAM_TO_DESCRIBE_CACHE_TYPE_ID = 0x06;
+  public static final byte QUANTILES_HDRHISTOGRAM_TO_PERCENTILES_DESCRIBE_CACHE_TYPE_ID = 0x07;

   public static final String HDRHISTOGRAM_TYPE_NAME = "HdrHistogramSketch";
+  public static final ColumnType TYPE = ColumnType.ofComplex(HDRHISTOGRAM_TYPE_NAME);

   public static final ObjectMapper objectMapper = new ObjectMapper();

@@ -49,6 +49,8 @@ public class HdrHistogramModule implements DruidModule {

     SqlBindings.addOperatorConversion(binder, HdrHistogramQuantilesOperatorConversion.class);
     SqlBindings.addOperatorConversion(binder, HdrHistogramPercentilesOperatorConversion.class);
+    SqlBindings.addOperatorConversion(binder, HdrHistogramDescribeOperatorConversion.class);
+    SqlBindings.addOperatorConversion(binder, HdrHistogramPercentilesDescribeOperatorConversion.class);
   }

   @Override

@@ -60,7 +62,9 @@ public class HdrHistogramModule implements DruidModule {
         new NamedType(HdrHistogramMergeAggregatorFactory.class, "HdrHistogramSketchMerge"),
         new NamedType(HdrHistogramToQuantilePostAggregator.class, "HdrHistogramSketchToQuantile"),
         new NamedType(HdrHistogramToQuantilesPostAggregator.class, "HdrHistogramSketchToQuantiles"),
-        new NamedType(HdrHistogramToPercentilesPostAggregator.class, "HdrHistogramSketchToPercentiles")
+        new NamedType(HdrHistogramToPercentilesPostAggregator.class, "HdrHistogramSketchToPercentiles"),
+        new NamedType(HdrHistogramToDescribePostAggregator.class, "HdrHistogramSketchToDescribe"),
+        new NamedType(HdrHistogramToPercentilesDescribePostAggregator.class, "HdrHistogramSketchToPercentilesDescription")
     ).addSerializer(HistogramSketch.class, new HistogramJsonSerializer())
     );
 }
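These registrations wire the describe feature into both query paths: Druid SQL (the operator conversions register the HDR_DESCRIBE and HDR_GET_PERCENTILES_DESCRIPTION functions defined later in this diff) and native JSON queries (the Jackson NamedTypes). A hedged sketch of the native side; the names "d" and "sketch" are placeholders:

package org.apache.druid.query.aggregation.sketch.HdrHistogram;

import org.apache.druid.query.aggregation.PostAggregator;

public class NamedTypeDemo {
  public static void main(String[] args) {
    // Because of the NamedType registered above, a native query spec fragment like
    //   {"type": "HdrHistogramSketchToDescribe", "name": "d", "fieldName": "sketch"}
    // deserializes into exactly the object constructed here.
    PostAggregator describe = new HdrHistogramToDescribePostAggregator("d", "sketch");
    System.out.println(describe); // HdrHistogramToDescribePostAggregator{name='d', fieldName='sketch'}
  }
}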
HdrHistogramToDescribePostAggregator.java (new file)

@@ -0,0 +1,108 @@
+package org.apache.druid.query.aggregation.sketch.HdrHistogram;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.collect.Sets;
+import org.HdrHistogram.HistogramSketch;
+import org.apache.druid.java.util.common.IAE;
+import org.apache.druid.query.aggregation.AggregatorFactory;
+import org.apache.druid.query.aggregation.PostAggregator;
+import org.apache.druid.query.cache.CacheKeyBuilder;
+import org.apache.druid.segment.ColumnInspector;
+import org.apache.druid.segment.column.ColumnType;
+
+import javax.annotation.Nullable;
+import java.util.*;
+
+public class HdrHistogramToDescribePostAggregator implements PostAggregator {
+  private final String name;
+  private final String fieldName;
+
+  @JsonCreator
+  public HdrHistogramToDescribePostAggregator(
+      @JsonProperty("name") String name,
+      @JsonProperty("fieldName") String fieldName
+  ){
+    this.name = name;
+    this.fieldName = fieldName;
+  }
+
+  @Override
+  public ColumnType getType(ColumnInspector signature){
+    return ColumnType.STRING;
+  }
+
+  @Override
+  @JsonProperty
+  public String getName() {
+    return name;
+  }
+
+  @JsonProperty
+  public String getFieldName() {
+    return fieldName;
+  }
+
+  @Nullable
+  @Override
+  public Object compute(Map<String, Object> values) {
+    HistogramSketch histogram = (HistogramSketch) values.get(fieldName);
+    if(histogram == null){
+      return "{}"; //"[]"
+    }
+    return HdrHistogramModule.toJson(histogram.describe());
+  }
+
+  @Override
+  public Comparator<double[]> getComparator()
+  {
+    throw new IAE("Comparing arrays of quantiles is not supported");
+  }
+
+  @Override
+  public Set<String> getDependentFields()
+  {
+    return Sets.newHashSet(fieldName);
+  }
+
+  @Override
+  public PostAggregator decorate(Map<String, AggregatorFactory> aggregators) {
+    return this;
+  }
+
+  @Override
+  public byte[] getCacheKey() {
+    CacheKeyBuilder builder = new CacheKeyBuilder(HdrHistogramModule.CACHE_TYPE_ID_OFFSET).appendByte(HdrHistogramModule.QUANTILES_HDRHISTOGRAM_TO_DESCRIBE_CACHE_TYPE_ID)
+        .appendString(fieldName);
+    return builder.build();
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    HdrHistogramToDescribePostAggregator that = (HdrHistogramToDescribePostAggregator) o;
+
+    return name.equals(that.name) &&
+        fieldName.equals(that.fieldName);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(name, fieldName);
+  }
+
+  @Override
+  public String toString() {
+    return "HdrHistogramToDescribePostAggregator{" +
+        "name='" + name + '\'' +
+        ", fieldName='" + fieldName + '\'' +
+        '}';
+  }
+
+}
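A quick sanity-check sketch for the class above, relying only on behavior visible in this diff (compute() falls back to "{}" when the sketch column is absent, and the dependent-fields set contains only fieldName); the names "latencyDescribe" and "latencySketch" are illustrative:

package org.apache.druid.query.aggregation.sketch.HdrHistogram;

import java.util.HashMap;
import java.util.Map;

public class DescribePostAggDemo {
  public static void main(String[] args) {
    HdrHistogramToDescribePostAggregator agg =
        new HdrHistogramToDescribePostAggregator("latencyDescribe", "latencySketch");
    Map<String, Object> row = new HashMap<>(); // a row without the sketch column
    System.out.println(agg.compute(row));         // {}  (the null-sketch fallback)
    System.out.println(agg.getDependentFields()); // [latencySketch]
  }
}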
HdrHistogramToPercentilesDescribePostAggregator.java (new file)

@@ -0,0 +1,125 @@
+package org.apache.druid.query.aggregation.sketch.HdrHistogram;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.collect.Sets;
+import org.HdrHistogram.HistogramSketch;
+import org.HdrHistogram.Percentile;
+import org.apache.druid.java.util.common.IAE;
+import org.apache.druid.query.aggregation.AggregatorFactory;
+import org.apache.druid.query.aggregation.PostAggregator;
+import org.apache.druid.query.cache.CacheKeyBuilder;
+import org.apache.druid.segment.ColumnInspector;
+import org.apache.druid.segment.column.ColumnType;
+
+import javax.annotation.Nullable;
+import java.util.*;
+
+public class HdrHistogramToPercentilesDescribePostAggregator implements PostAggregator {
+  private final String name;
+  private final String fieldName;
+  private final int percentileTicksPerHalfDistance;
+
+  @JsonCreator
+  public HdrHistogramToPercentilesDescribePostAggregator(
+      @JsonProperty("name") String name,
+      @JsonProperty("fieldName") String fieldName,
+      @JsonProperty("percentileTicksPerHalfDistance") int percentileTicksPerHalfDistance
+  ){
+    this.name = name;
+    this.fieldName = fieldName;
+    this.percentileTicksPerHalfDistance = percentileTicksPerHalfDistance;
+  }
+
+  @Override
+  public ColumnType getType(ColumnInspector signature){
+    return ColumnType.STRING;
+  }
+
+  @Override
+  @JsonProperty
+  public String getName() {
+    return name;
+  }
+
+  @JsonProperty
+  public String getFieldName() {
+    return fieldName;
+  }
+
+  @JsonProperty
+  public int getPercentileTicksPerHalfDistance() {
+    return percentileTicksPerHalfDistance;
+  }
+
+  @Nullable
+  @Override
+  public Object compute(Map<String, Object> values) {
+    HistogramSketch histogram = (HistogramSketch) values.get(fieldName);
+    if(histogram == null){
+      return "{\"percentiles\":[],\"describe\":{}}";
+    }
+    List<Percentile> percentiles = histogram.percentileList(percentileTicksPerHalfDistance);
+    Map<String, Object> describe = histogram.describe();
+    Map<String, Object> rst = new LinkedHashMap<>();
+    rst.put("percentiles", percentiles);
+    rst.put("description", describe);
+    return HdrHistogramModule.toJson(rst);
+  }
+
+  @Override
+  public Comparator<double[]> getComparator()
+  {
+    throw new IAE("Comparing object is not supported");
+  }
+
+  @Override
+  public Set<String> getDependentFields()
+  {
+    return Sets.newHashSet(fieldName);
+  }
+
+  @Override
+  public PostAggregator decorate(Map<String, AggregatorFactory> aggregators) {
+    return this;
+  }
+
+  @Override
+  public byte[] getCacheKey() {
+    CacheKeyBuilder builder = new CacheKeyBuilder(HdrHistogramModule.CACHE_TYPE_ID_OFFSET).appendByte(HdrHistogramModule.QUANTILES_HDRHISTOGRAM_TO_PERCENTILES_DESCRIBE_CACHE_TYPE_ID)
+        .appendString(fieldName);
+    builder.appendInt(percentileTicksPerHalfDistance);
+    return builder.build();
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    HdrHistogramToPercentilesDescribePostAggregator that = (HdrHistogramToPercentilesDescribePostAggregator) o;
+
+    return percentileTicksPerHalfDistance == that.percentileTicksPerHalfDistance &&
+        name.equals(that.name) &&
+        fieldName.equals(that.fieldName);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(name, fieldName, percentileTicksPerHalfDistance);
+  }
+
+  @Override
+  public String toString() {
+    return "HdrHistogramToPercentilesDescribePostAggregator{" +
+        "name='" + name + '\'' +
+        ", fieldName='" + fieldName + '\'' +
+        ", probabilitys=" + percentileTicksPerHalfDistance +
+        '}';
+  }
+
+}
HdrHistogramToPercentilesPostAggregator.java

@@ -9,6 +9,8 @@ import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.PostAggregator;
 import org.apache.druid.query.cache.CacheKeyBuilder;
+import org.apache.druid.segment.ColumnInspector;
+import org.apache.druid.segment.column.ColumnType;

 import javax.annotation.Nullable;
 import java.util.*;

@@ -29,6 +31,11 @@ public class HdrHistogramToPercentilesPostAggregator implements PostAggregator {
     this.percentileTicksPerHalfDistance = percentileTicksPerHalfDistance;
   }

+  @Override
+  public ColumnType getType(ColumnInspector signature){
+    return ColumnType.STRING;
+  }
+
   @Override
   @JsonProperty
   public String getName() {

@@ -49,6 +56,9 @@ public class HdrHistogramToPercentilesPostAggregator implements PostAggregator {
   @Override
   public Object compute(Map<String, Object> values) {
     HistogramSketch histogram = (HistogramSketch) values.get(fieldName);
+    if(histogram == null){
+      return "[]"; //"[]"
+    }
     List<Percentile> percentiles = histogram.percentileList(percentileTicksPerHalfDistance);
     return HdrHistogramModule.toJson(percentiles);
   }
HdrHistogramToQuantilePostAggregator.java

@@ -2,13 +2,16 @@ package org.apache.druid.query.aggregation.sketch.HdrHistogram;

 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.collect.Ordering;
 import com.google.common.collect.Sets;
-import org.HdrHistogram.Histogram;
+import com.google.common.primitives.Longs;
 import org.HdrHistogram.HistogramSketch;
 import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.PostAggregator;
 import org.apache.druid.query.cache.CacheKeyBuilder;
+import org.apache.druid.segment.ColumnInspector;
+import org.apache.druid.segment.column.ColumnType;

 import javax.annotation.Nullable;
 import java.util.Comparator;

@@ -21,6 +24,14 @@ public class HdrHistogramToQuantilePostAggregator implements PostAggregator {
   private final String fieldName;
   private final float probability;

+  static final Comparator COMPARATOR = new Ordering(){
+    @Override
+    public int compare(Object o, Object o1)
+    {
+      return Longs.compare(((Number) o).longValue(), ((Number) o1).longValue());
+    }
+  }.nullsFirst();
+
   @JsonCreator
   public HdrHistogramToQuantilePostAggregator(
       @JsonProperty("name") String name,

@@ -36,25 +47,28 @@ public class HdrHistogramToQuantilePostAggregator implements PostAggregator {
     }
   }

+  @Override
+  public ColumnType getType(ColumnInspector signature){
+    return ColumnType.LONG;
+  }
+
   @Override
   public Set<String> getDependentFields() {
     return Sets.newHashSet(fieldName);
   }

   @Override
-  public Comparator getComparator() {
-    return new Comparator<Long>(){
-      @Override
-      public int compare(final Long a, final Long b){
-        return Long.compare(a, b);
-      }
-    };
+  public Comparator<Long> getComparator() {
+    return COMPARATOR;
   }

   @Nullable
   @Override
   public Object compute(Map<String, Object> values) {
     HistogramSketch histogram = (HistogramSketch) values.get(fieldName);
+    if(histogram == null){
+      return null;
+    }
     return histogram.getValueAtPercentile(probability * 100);
   }
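The comparator rewrite above matters because compute() can now return null when the sketch is missing: the old anonymous Comparator<Long> would throw a NullPointerException on a null operand, whereas Guava's Ordering.nullsFirst() sorts nulls ahead of real values. A self-contained illustration (Ordering and Longs are the same Guava classes imported above):

import com.google.common.collect.Ordering;
import com.google.common.primitives.Longs;

import java.util.Comparator;

public class NullsFirstDemo {
  public static void main(String[] args) {
    Comparator cmp = new Ordering() {
      @Override
      public int compare(Object a, Object b) {
        return Longs.compare(((Number) a).longValue(), ((Number) b).longValue());
      }
    }.nullsFirst();
    System.out.println(cmp.compare(null, 5L)); // negative: null sorts first, no NPE
    System.out.println(cmp.compare(7L, 5L));   // positive: normal long ordering
  }
}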
HdrHistogramToQuantilesPostAggregator.java

@@ -9,6 +9,8 @@ import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.PostAggregator;
 import org.apache.druid.query.cache.CacheKeyBuilder;
+import org.apache.druid.segment.ColumnInspector;
+import org.apache.druid.segment.column.ColumnType;

 import javax.annotation.Nullable;
 import java.util.*;

@@ -29,6 +31,11 @@ public class HdrHistogramToQuantilesPostAggregator implements PostAggregator {
     this.probabilitys = probabilitys;
   }

+  @Override
+  public ColumnType getType(ColumnInspector signature){
+    return ColumnType.LONG_ARRAY;
+  }
+
   @Override
   @JsonProperty
   public String getName() {

@@ -49,7 +56,11 @@ public class HdrHistogramToQuantilesPostAggregator implements PostAggregator {
   @Override
   public Object compute(Map<String, Object> values) {
     HistogramSketch histogram = (HistogramSketch) values.get(fieldName);
-    final long[] counts = new long[probabilitys.length];
+    if(histogram == null){
+      //return null;
+      return new Long[probabilitys.length];
+    }
+    final Long[] counts = new Long[probabilitys.length];
     for (int i = 0; i < probabilitys.length; i++) {
       counts[i] = histogram.getValueAtPercentile(probabilitys[i] * 100);
     }
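The switch from long[] to Long[] in the hunk above is what makes the missing-sketch branch representable: a primitive array cannot hold null, so an absent sketch would have been indistinguishable from a row of genuinely zero quantiles. A tiny illustration:

public class BoxedArrayDemo {
  public static void main(String[] args) {
    Long[] boxed = new Long[3]; // {null, null, null}: "no data" is representable
    long[] prim = new long[3];  // {0, 0, 0}: looks identical to real zero values
    System.out.println(boxed[0] == null); // true
    System.out.println(prim[0]);          // 0
  }
}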
HdrHistogramDescribeOperatorConversion.java (new file)

@@ -0,0 +1,77 @@
+package org.apache.druid.query.aggregation.sketch.HdrHistogram.sql;
+
+import org.apache.calcite.rex.RexCall;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.sql.SqlFunction;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.aggregation.PostAggregator;
+import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
+import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramToDescribePostAggregator;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.OperatorConversions;
+import org.apache.druid.sql.calcite.expression.PostAggregatorVisitor;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+
+import javax.annotation.Nullable;
+import java.util.List;
+
+public class HdrHistogramDescribeOperatorConversion implements SqlOperatorConversion {
+  private static final String FUNCTION_NAME = "HDR_DESCRIBE";
+  private static final SqlFunction SQL_FUNCTION = OperatorConversions
+      .operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
+      .operandTypes(SqlTypeFamily.ANY)
+      .requiredOperands(1)
+      .returnTypeInference(ReturnTypes.explicit(SqlTypeName.VARCHAR))
+      .build();
+
+  @Override
+  public SqlOperator calciteOperator()
+  {
+    return SQL_FUNCTION;
+  }
+
+  @Override
+  public DruidExpression toDruidExpression(
+      PlannerContext plannerContext,
+      RowSignature rowSignature,
+      RexNode rexNode
+  )
+  {
+    return null;
+  }
+
+  @Nullable
+  @Override
+  public PostAggregator toPostAggregator(
+      PlannerContext plannerContext,
+      RowSignature rowSignature,
+      RexNode rexNode,
+      PostAggregatorVisitor postAggregatorVisitor
+  )
+  {
+    final List<RexNode> operands = ((RexCall) rexNode).getOperands();
+    final PostAggregator postAgg = OperatorConversions.toPostAggregator(
+        plannerContext,
+        rowSignature,
+        operands.get(0),
+        postAggregatorVisitor,
+        true
+    );
+
+    if (postAgg == null) {
+      return null;
+    }
+
+
+    return new HdrHistogramToDescribePostAggregator(
+        postAggregatorVisitor.getOutputNamePrefix() + postAggregatorVisitor.getAndIncrementCounter(),
+        ((FieldAccessPostAggregator)postAgg).getFieldName()
+    );
+  }
+}
HdrHistogramObjectSqlAggregator.java

@@ -18,6 +18,7 @@ import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramAggregatorFactory;
 import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramMergeAggregatorFactory;
 import org.apache.druid.segment.VirtualColumn;
+import org.apache.druid.segment.column.ColumnType;
 import org.apache.druid.segment.column.RowSignature;
 import org.apache.druid.segment.column.ValueType;
 import org.apache.druid.sql.calcite.aggregation.Aggregation;

@@ -118,11 +119,11 @@ public class HdrHistogramObjectSqlAggregator implements SqlAggregator {
     }

     // No existing match found. Create a new one.
-    final List<VirtualColumn> virtualColumns = new ArrayList<>();
+    // The new version removed: final List<VirtualColumn> virtualColumns = new ArrayList<>();

     if (input.isDirectColumnAccess()) {
       // The argument is a Histogram object.
-      if (rowSignature.getColumnType(input.getDirectColumn()).orElse(null) == ValueType.COMPLEX) {
+      if (rowSignature.getColumnType(input.getDirectColumn()).map(type -> type.is(ValueType.COMPLEX)).orElse(false)) {
         aggregatorFactory = new HdrHistogramMergeAggregatorFactory(
             histogramName,
             input.getDirectColumn(),

@@ -142,12 +143,11 @@ public class HdrHistogramObjectSqlAggregator implements SqlAggregator {
         );
       }
     } else {
-      final VirtualColumn virtualColumn =
-          virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, input, SqlTypeName.BIGINT);
-      virtualColumns.add(virtualColumn);
+      final String virtualColumnName =
+          virtualColumnRegistry.getOrCreateVirtualColumnForExpression(input, ColumnType.LONG);
       aggregatorFactory = new HdrHistogramAggregatorFactory(
           histogramName,
-          virtualColumn.getOutputName(),
+          virtualColumnName,
           lowestDiscernibleValue,
           highestTrackableValue,
           numberOfSignificantValueDigits,

@@ -156,7 +156,6 @@ public class HdrHistogramObjectSqlAggregator implements SqlAggregator {
     }

     return Aggregation.create(
-        virtualColumns,
         ImmutableList.of(aggregatorFactory),
         null
     );
HdrHistogramPercentilesDescribeOperatorConversion.java (new file)

@@ -0,0 +1,88 @@
+package org.apache.druid.query.aggregation.sketch.HdrHistogram.sql;
+
+import org.apache.calcite.rex.RexCall;
+import org.apache.calcite.rex.RexLiteral;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.sql.SqlFunction;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.aggregation.PostAggregator;
+import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
+import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramToPercentilesDescribePostAggregator;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.OperatorConversions;
+import org.apache.druid.sql.calcite.expression.PostAggregatorVisitor;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+
+import javax.annotation.Nullable;
+import java.util.List;
+
+public class HdrHistogramPercentilesDescribeOperatorConversion implements SqlOperatorConversion {
+  private static final String FUNCTION_NAME = "HDR_GET_PERCENTILES_DESCRIPTION";
+  private static final SqlFunction SQL_FUNCTION = OperatorConversions
+      .operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
+      .operandTypes(SqlTypeFamily.ANY, SqlTypeFamily.NUMERIC)
+      .requiredOperands(1)
+      .returnTypeInference(ReturnTypes.explicit(SqlTypeName.VARCHAR))
+      .build();
+
+  @Override
+  public SqlOperator calciteOperator()
+  {
+    return SQL_FUNCTION;
+  }
+
+  @Override
+  public DruidExpression toDruidExpression(
+      PlannerContext plannerContext,
+      RowSignature rowSignature,
+      RexNode rexNode
+  )
+  {
+    return null;
+  }
+
+  @Nullable
+  @Override
+  public PostAggregator toPostAggregator(
+      PlannerContext plannerContext,
+      RowSignature rowSignature,
+      RexNode rexNode,
+      PostAggregatorVisitor postAggregatorVisitor
+  )
+  {
+    final List<RexNode> operands = ((RexCall) rexNode).getOperands();
+    final PostAggregator postAgg = OperatorConversions.toPostAggregator(
+        plannerContext,
+        rowSignature,
+        operands.get(0),
+        postAggregatorVisitor,
+        true
+    );
+
+    if (postAgg == null) {
+      return null;
+    }
+
+    int percentileTicksPerHalfDistance = 5;
+    if (operands.size() == 2) {
+      if (!operands.get(1).isA(SqlKind.LITERAL)) {
+        return null;
+      }
+
+      percentileTicksPerHalfDistance = RexLiteral.intValue(operands.get(1));
+    }
+
+    return new HdrHistogramToPercentilesDescribePostAggregator(
+        postAggregatorVisitor.getOutputNamePrefix() + postAggregatorVisitor.getAndIncrementCounter(),
+        ((FieldAccessPostAggregator)postAgg).getFieldName(),
+        percentileTicksPerHalfDistance
+    );
+  }
+}
HdrHistogramPercentilesOperatorConversion.java

@@ -14,16 +14,16 @@ import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramToPercentilesPostAggregator;
 import org.apache.druid.query.aggregation.PostAggregator;
 import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
 import org.apache.druid.segment.column.RowSignature;
-import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
 import org.apache.druid.sql.calcite.expression.DruidExpression;
 import org.apache.druid.sql.calcite.expression.OperatorConversions;
 import org.apache.druid.sql.calcite.expression.PostAggregatorVisitor;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
 import org.apache.druid.sql.calcite.planner.PlannerContext;

 import javax.annotation.Nullable;
 import java.util.List;

-public class HdrHistogramPercentilesOperatorConversion extends DirectOperatorConversion {
+public class HdrHistogramPercentilesOperatorConversion implements SqlOperatorConversion {
   private static final String FUNCTION_NAME = "HDR_GET_PERCENTILES";
   private static final SqlFunction SQL_FUNCTION = OperatorConversions
       .operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))

@@ -32,10 +32,6 @@ public class HdrHistogramPercentilesOperatorConversion extends DirectOperatorConversion {
       .returnTypeInference(ReturnTypes.explicit(SqlTypeName.VARCHAR))
       .build();

-  public HdrHistogramPercentilesOperatorConversion() {
-    super(SQL_FUNCTION, FUNCTION_NAME);
-  }
-
   @Override
   public SqlOperator calciteOperator()
   {

@@ -66,7 +62,8 @@ public class HdrHistogramPercentilesOperatorConversion extends DirectOperatorConversion {
         plannerContext,
         rowSignature,
         operands.get(0),
-        postAggregatorVisitor
+        postAggregatorVisitor,
+        true
     );

     if (postAgg == null) {
@@ -16,6 +16,7 @@ import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramAggreg
 import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramMergeAggregatorFactory;
 import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramToQuantilePostAggregator;
 import org.apache.druid.segment.VirtualColumn;
+import org.apache.druid.segment.column.ColumnType;
 import org.apache.druid.segment.column.RowSignature;
 import org.apache.druid.segment.column.ValueType;
 import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
@@ -141,22 +142,16 @@ public class HdrHistogramQuantileSqlAggregator implements SqlAggregator {
 
     // Check input for equivalence.
     final boolean inputMatches;
-    final VirtualColumn virtualInput = existing.getVirtualColumns()
-        .stream()
-        .filter(
-            virtualColumn ->
-                virtualColumn.getOutputName()
-                    .equals(theFactory.getFieldName())
-        )
-        .findFirst()
-        .orElse(null);
+    final DruidExpression virtualInput =
+        virtualColumnRegistry.findVirtualColumnExpressions(theFactory.requiredFields())
+            .stream()
+            .findFirst()
+            .orElse(null);
 
     if (virtualInput == null) {
-      inputMatches = input.isDirectColumnAccess()
-          && input.getDirectColumn().equals(theFactory.getFieldName());
+      inputMatches = input.isDirectColumnAccess() && input.getDirectColumn().equals(theFactory.getFieldName());
     } else {
-      inputMatches = ((ExpressionVirtualColumn) virtualInput).getExpression()
-          .equals(input.getExpression());
+      inputMatches = virtualInput.equals(input);
     }
 
     final boolean matches = inputMatches
@@ -177,11 +172,11 @@ public class HdrHistogramQuantileSqlAggregator implements SqlAggregator {
     }
 
     // No existing match found. Create a new one.
-    final List<VirtualColumn> virtualColumns = new ArrayList<>();
+    //final List<VirtualColumn> virtualColumns = new ArrayList<>();
 
     if (input.isDirectColumnAccess()) {
       // the argument is a Histogram object
-      if (rowSignature.getColumnType(input.getDirectColumn()).orElse(null) == ValueType.COMPLEX) {
+      if (rowSignature.getColumnType(input.getDirectColumn()).map(type -> type.is(ValueType.COMPLEX)).orElse(false)) {
         aggregatorFactory = new HdrHistogramMergeAggregatorFactory(
             histogramName,
             input.getDirectColumn(),
@@ -201,12 +196,11 @@ public class HdrHistogramQuantileSqlAggregator implements SqlAggregator {
         );
       }
     } else {
-      final VirtualColumn virtualColumn =
-          virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, input, SqlTypeName.BIGINT);
-      virtualColumns.add(virtualColumn);
+      final String virtualColumnName =
+          virtualColumnRegistry.getOrCreateVirtualColumnForExpression(input, ColumnType.LONG);
       aggregatorFactory = new HdrHistogramAggregatorFactory(
           histogramName,
-          virtualColumn.getOutputName(),
+          virtualColumnName,
           lowestDiscernibleValue,
           highestTrackableValue,
           numberOfSignificantValueDigits,
@@ -234,7 +228,6 @@ public class HdrHistogramQuantileSqlAggregator implements SqlAggregator {
     }
 
     return Aggregation.create(
-        virtualColumns,
         ImmutableList.of(aggregatorFactory),
         new HdrHistogramToQuantilePostAggregator(name, histogramName, probability)
     );
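
Consolidating the registry change above into one sketch: in 0.18 the aggregator collected VirtualColumn objects and handed them to Aggregation.create, while in 26.0 the VirtualColumnRegistry tracks the columns itself and hands back only the generated column name. A hedged sketch of the new shape, reusing the names from the hunks above:

    // the registry records the virtual column internally and returns its name
    final String virtualColumnName =
        virtualColumnRegistry.getOrCreateVirtualColumnForExpression(input, ColumnType.LONG);
    return Aggregation.create(
        // note: no virtualColumns list argument anymore
        ImmutableList.of(new HdrHistogramAggregatorFactory(
            histogramName,
            virtualColumnName,
            lowestDiscernibleValue,
            highestTrackableValue,
            numberOfSignificantValueDigits,
            round  // assumption: the factory's trailing argument, as in the tests below
        )),
        new HdrHistogramToQuantilePostAggregator(name, histogramName, probability)
    );
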
@@ -62,50 +62,30 @@ public class HdrHistogramQuantilesOperatorConversion implements SqlOperatorConve
   {
     final List<RexNode> operands = ((RexCall) rexNode).getOperands();
     final float[] args = new float[operands.size() - 1];
-    PostAggregator postAgg = null;
 
-    int operandCounter = 0;
-    for (RexNode operand : operands) {
-      final PostAggregator convertedPostAgg = OperatorConversions.toPostAggregator(
-          plannerContext,
-          rowSignature,
-          operand,
-          postAggregatorVisitor
-      );
-      if (convertedPostAgg == null) {
-        if (operandCounter > 0) {
-          try {
-            if (!operand.isA(SqlKind.LITERAL)) {
-              return null;
-            }
-            float arg = ((Number) RexLiteral.value(operand)).floatValue();
-            args[operandCounter - 1] = arg;
-          }
-          catch (ClassCastException cce) {
-            return null;
-          }
-        } else {
-          return null;
-        }
-      } else {
-        if (operandCounter == 0) {
-          postAgg = convertedPostAgg;
-        } else {
-          if (!operand.isA(SqlKind.LITERAL)) {
-            return null;
-          }
-        }
-      }
-      operandCounter++;
-    }
+    // in the new version, the input sketch is taken directly from the first operand
+    final PostAggregator inputSketchPostAgg = OperatorConversions.toPostAggregator(
+        plannerContext,
+        rowSignature,
+        operands.get(0),
+        postAggregatorVisitor,
+        true
+    );
 
-    if (postAgg == null) {
-      return null;
+    if (inputSketchPostAgg == null) {
+      return null;
     }
 
+    // parse the remaining quantile operands directly as literals
+    for (int i = 1; i < operands.size(); i++) {
+      RexNode operand = operands.get(i);
+      float arg = ((Number) RexLiteral.value(operand)).floatValue();
+      args[i - 1] = arg;
+    }
 
     return new HdrHistogramToQuantilesPostAggregator(
         postAggregatorVisitor.getOutputNamePrefix() + postAggregatorVisitor.getAndIncrementCounter(),
-        ((FieldAccessPostAggregator)postAgg).getFieldName(),
+        ((FieldAccessPostAggregator) inputSketchPostAgg).getFieldName(),
         args
     );
   }
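
One behavioral consequence of this rewrite worth noting: the old loop tolerated non-literal quantile arguments by returning null (falling back to other conversions), while the new loop calls RexLiteral.value directly, so every argument after the sketch must be a numeric literal. A hedged usage sketch matching the tests later in this change:

    // quantile arguments must now be numeric literals
    String sql = "select HDR_GET_QUANTILES(HDR_HISTOGRAM(m1, 1, 100, 2), 0.5, 0.9, 0.99) from druid.foo";
    // a non-literal argument such as HDR_GET_QUANTILES(HDR_HISTOGRAM(m1), some_column)
    // would presumably fail at planning time rather than fall back to null
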
@@ -2,17 +2,13 @@ package org.apache.druid.query.aggregation.sketch.HdrHistogram;
 
 import com.google.common.collect.ImmutableMap;
 import org.HdrHistogram.*;
-import org.apache.datasketches.theta.Sketches;
-import org.apache.datasketches.theta.UpdateSketch;
 import org.apache.druid.data.input.MapBasedRow;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.BufferAggregator;
 import org.apache.druid.query.aggregation.TestLongColumnSelector;
 import org.apache.druid.query.aggregation.TestObjectColumnSelector;
-import org.apache.druid.query.aggregation.datasketches.theta.SketchHolder;
-import org.apache.druid.query.aggregation.datasketches.theta.SketchMergeAggregatorFactory;
 import org.apache.druid.query.groupby.epinephelinae.GrouperTestUtil;
-import org.apache.druid.query.groupby.epinephelinae.TestColumnSelectorFactory;
+import org.apache.druid.query.groupby.epinephelinae.GroupByTestColumnSelectorFactory;
 import org.apache.druid.segment.ColumnSelectorFactory;
 import org.junit.Assert;
 import org.junit.Test;
@@ -230,7 +226,7 @@ public class HdrHistogramBufferAggregatorTest {
 
   @Test
   public void testMergeAggregatorRelocate() {
-    final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
+    final GroupByTestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
     HistogramSketch histogram = new HistogramSketch(3);
     for (int i = 0; i < 100000; i++) {
       histogram.recordValue(i);
@@ -252,7 +248,7 @@ public class HdrHistogramBufferAggregatorTest {
 
   @Test
   public void testAggregatorRelocate() {
-    final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
+    final GroupByTestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
     HistogramSketch histogram = new HistogramSketch(3);
     for (int i = 0; i < 100000; i++) {
       histogram.recordValue(i);
@@ -0,0 +1,79 @@
+package org.apache.druid.query.aggregation.sketch.HdrHistogram;
+
+import org.HdrHistogram.DirectArrayHistogram;
+import org.HdrHistogram.HistogramSketch;
+import org.HdrHistogram.Histogramer;
+import org.HdrHistogram.Percentile;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.Test;
+
+import java.io.BufferedWriter;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;
+
+public class HistogramSketchTest {
+
+  @Test
+  public void describeTest() throws Exception {
+    DirectArrayHistogram histogram = new DirectArrayHistogram(1, 1000000, 3,
+        ByteBuffer.allocate(HistogramSketch.getUpdatableSerializationBytes(1, 1000000, 3)));
+    System.out.println(histogram.describe());
+    for (int i = 0; i < 10000; i++) {
+      histogram.recordValue(i);
+    }
+    System.out.println(histogram.describe());
+    for (Percentile percentile : histogram.percentileList(100)) {
+      System.out.println(percentile);
+    }
+  }
+
+  @Test
+  public void describeTest1() throws Exception {
+    HistogramSketch histogram = new HistogramSketch(1);
+    System.out.println(histogram.describe());
+    for (int i = 0; i < 10000; i++) {
+      histogram.recordValue(i);
+    }
+    System.out.println(histogram.describe());
+    for (Percentile percentile : histogram.percentileList(100)) {
+      System.out.println(percentile);
+    }
+    System.out.println(StringUtils.repeat('#', 100));
+    histogram = new HistogramSketch(1);
+    for (int i = 0; i < 10000; i++) {
+      histogram.recordValue(ThreadLocalRandom.current().nextLong(100000));
+    }
+    System.out.println(histogram.describe());
+    for (Percentile percentile : histogram.percentileList(100)) {
+      System.out.println(percentile);
+    }
+  }
+
+  @Test
+  public void describeTest3() throws Exception {
+    HistogramSketch histogram = new HistogramSketch(3);
+    System.out.println(histogram.describe());
+    for (int i = 0; i < 10000; i++) {
+      histogram.recordValue(i);
+    }
+    System.out.println(histogram.describe());
+    for (Percentile percentile : histogram.percentileList(100)) {
+      System.out.println(percentile);
+    }
+    System.out.println(StringUtils.repeat('#', 100));
+    histogram = new HistogramSketch(3);
+    for (int i = 0; i < 10000; i++) {
+      histogram.recordValue(ThreadLocalRandom.current().nextLong(100000));
+    }
+    System.out.println(histogram.describe());
+    for (Percentile percentile : histogram.percentileList(100)) {
+      System.out.println(percentile);
+    }
+  }
+
+}
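
The describeTest above sizes an on-heap buffer; the same sizing arithmetic should carry over off-heap, assuming DirectArrayHistogram accepts any ByteBuffer (allocateDirect is the assumption here, not shown in this change):

    int bytes = HistogramSketch.getUpdatableSerializationBytes(1, 1000000, 3);
    ByteBuffer buf = ByteBuffer.allocateDirect(bytes);  // off-heap variant; on-heap allocate() is what the test uses
    DirectArrayHistogram histogram = new DirectArrayHistogram(1, 1000000, 3, buf);
    histogram.recordValue(42);
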
@@ -1,12 +1,15 @@
 package org.apache.druid.query.aggregation.sketch.HdrHistogram.sql;
 
+import com.alibaba.fastjson2.JSON;
 import com.fasterxml.jackson.databind.Module;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
+import com.google.inject.Injector;
 import org.apache.calcite.schema.SchemaPlus;
 import org.apache.druid.data.input.InputRow;
+import org.apache.druid.guice.DruidInjectorBuilder;
 import org.apache.druid.java.util.common.granularity.Granularities;
 import org.apache.druid.java.util.common.io.Closer;
 import org.apache.druid.query.Druids;
@@ -27,66 +30,49 @@ import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
 import org.apache.druid.segment.IndexBuilder;
 import org.apache.druid.segment.QueryableIndex;
 import org.apache.druid.segment.TestHelper;
+import org.apache.druid.segment.column.ColumnType;
 import org.apache.druid.segment.column.ValueType;
 import org.apache.druid.segment.incremental.IncrementalIndexSchema;
+import org.apache.druid.segment.join.JoinableFactoryWrapper;
 import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
 import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
 import org.apache.druid.server.QueryStackTests;
 import org.apache.druid.server.security.AuthTestUtils;
 import org.apache.druid.server.security.AuthenticationResult;
-import org.apache.druid.sql.SqlLifecycle;
-import org.apache.druid.sql.SqlLifecycleFactory;
+import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.QueryTestBuilder;
+import org.apache.druid.sql.calcite.QueryTestRunner;
 import org.apache.druid.sql.calcite.filtration.Filtration;
 import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
 import org.apache.druid.sql.calcite.planner.PlannerConfig;
 import org.apache.druid.sql.calcite.planner.PlannerContext;
 import org.apache.druid.sql.calcite.planner.PlannerFactory;
-import org.apache.druid.sql.calcite.util.CalciteTestBase;
-import org.apache.druid.sql.calcite.util.CalciteTests;
-import org.apache.druid.sql.calcite.util.QueryLogHook;
-import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
+import org.apache.druid.sql.calcite.util.*;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.partition.LinearShardSpec;
 import org.junit.*;
 import org.junit.rules.TemporaryFolder;
 
+import java.io.File;
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
-public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
-  private static final String DATA_SOURCE = "foo";
-
-  private static QueryRunnerFactoryConglomerate conglomerate;
-  private static Closer resourceCloser;
-  private static AuthenticationResult authenticationResult = CalciteTests.REGULAR_USER_AUTH_RESULT;
-  private static final Map<String, Object> QUERY_CONTEXT_DEFAULT = ImmutableMap.of(
-      PlannerContext.CTX_SQL_QUERY_ID, "dummy"
-  );
-
-  @Rule
-  public TemporaryFolder temporaryFolder = new TemporaryFolder();
-
-  @Rule
-  public QueryLogHook queryLogHook = QueryLogHook.create();
-
-  private SpecificSegmentsQuerySegmentWalker walker;
-  private SqlLifecycleFactory sqlLifecycleFactory;
-
-  @BeforeClass
-  public static void setUpClass() {
-    resourceCloser = Closer.create();
-    conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(resourceCloser);
+public class HdrHistogramQuantileSqlAggregatorTest extends BaseCalciteQueryTest {
+  @Override
+  public void gatherProperties(Properties properties)
+  {
+    super.gatherProperties(properties);
   }
 
-  @AfterClass
-  public static void tearDownClass() throws IOException {
-    resourceCloser.close();
+  @Override
+  public void configureGuice(DruidInjectorBuilder builder)
+  {
+    super.configureGuice(builder);
+    builder.addModule(new HdrHistogramModule());
   }
 
   public static final List<InputRow> ROWS1 = ImmutableList.of(
-      CalciteTests.createRow(
+      TestDataBuilder.createRow(
          ImmutableMap.<String, Object>builder()
              .put("t", "2000-01-01")
              .put("m1", "1")
@@ -96,7 +82,7 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
              .put("dim3", ImmutableList.of("a", "b"))
              .build()
      ),
-      CalciteTests.createRow(
+      TestDataBuilder.createRow(
          ImmutableMap.<String, Object>builder()
              .put("t", "2000-01-02")
              .put("m1", "2.0")
@@ -106,7 +92,7 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
              .put("dim3", ImmutableList.of("b", "c"))
              .build()
      ),
-      CalciteTests.createRow(
+      TestDataBuilder.createRow(
          ImmutableMap.<String, Object>builder()
              .put("t", "2000-01-03")
              .put("m1", "3.0")
@@ -116,7 +102,7 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
              .put("dim3", ImmutableList.of("d"))
              .build()
      ),
-      CalciteTests.createRow(
+      TestDataBuilder.createRow(
          ImmutableMap.<String, Object>builder()
              .put("t", "2001-01-01")
              .put("m1", "4.0")
@@ -126,7 +112,7 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
              .put("dim3", ImmutableList.of(""))
              .build()
      ),
-      CalciteTests.createRow(
+      TestDataBuilder.createRow(
          ImmutableMap.<String, Object>builder()
              .put("t", "2001-01-02")
              .put("m1", "5.0")
@@ -136,7 +122,7 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
              .put("dim3", ImmutableList.of())
              .build()
      ),
-      CalciteTests.createRow(
+      TestDataBuilder.createRow(
          ImmutableMap.<String, Object>builder()
              .put("t", "2001-01-03")
              .put("m1", "6.0")
@@ -146,15 +132,20 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
       )
   );
 
-  @Before
-  public void setUp() throws Exception {
+  @SuppressWarnings("resource")
+  @Override
+  public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+      final QueryRunnerFactoryConglomerate conglomerate,
+      final JoinableFactoryWrapper joinableFactory,
+      final Injector injector
+  ) throws IOException {
     HdrHistogramModule.registerSerde();
     for (Module mod : new HdrHistogramModule().getJacksonModules()) {
       CalciteTests.getJsonMapper().registerModule(mod);
       TestHelper.JSON_MAPPER.registerModule(mod);
     }
-    final QueryableIndex index = IndexBuilder.create()
+    //final QueryableIndex index = TestHelper.getTestIndexIO().loadIndex(new File("D:/doc/datas/testIndex-6201298"));
+    /*final QueryableIndex index = IndexBuilder.create()
         .tmpDir(temporaryFolder.newFolder())
         .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
         .schema(
@@ -176,81 +167,207 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
         )
         //.rows(CalciteTests.ROWS1)
         .rows(ROWS1)
-        .buildMMappedIndex();
+        .buildMMappedIndex();*/
 
-    walker = new SpecificSegmentsQuerySegmentWalker(conglomerate).add(
-        DataSegment.builder()
-            .dataSource(DATA_SOURCE)
-            .interval(index.getDataInterval())
-            .version("1")
-            .shardSpec(new LinearShardSpec(0))
-            .size(0)
-            .build(),
-        index
-    );
-
-    final PlannerConfig plannerConfig = new PlannerConfig();
-    final DruidOperatorTable operatorTable = new DruidOperatorTable(
-        ImmutableSet.of(
-            new HdrHistogramQuantileSqlAggregator(),
-            new HdrHistogramObjectSqlAggregator()
-        ),
-        ImmutableSet.of(
-            new HdrHistogramQuantilesOperatorConversion(),
-            new HdrHistogramPercentilesOperatorConversion()
-        )
-    );
-    SchemaPlus rootSchema =
-        CalciteTests.createMockRootSchema(conglomerate, walker, plannerConfig, AuthTestUtils.TEST_AUTHORIZER_MAPPER);
-
-    sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(
-        new PlannerFactory(
-            rootSchema,
-            CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate),
-            operatorTable,
-            CalciteTests.createExprMacroTable(),
-            plannerConfig,
-            AuthTestUtils.TEST_AUTHORIZER_MAPPER,
-            CalciteTests.getJsonMapper(),
-            CalciteTests.DRUID_SCHEMA_NAME
-        )
-    );
+    String[] files = new String[]{
+        "D:\\doc\\datas\\statistics_rule_segments\\2023-10-16T00_00_00.000Z_2023-10-17T00_00_00.000Z\\2023-10-16T07_51_47.981Z\\0\\17a457e4-599d-49c2-86e7-6655851bb99a\\index",
+        "D:\\doc\\datas\\statistics_rule_segments\\2023-10-15T00_00_00.000Z_2023-10-16T00_00_00.000Z\\2023-10-15T00_00_04.240Z\\15\\9a766f6c-779d-4f9f-9ff5-6a12c19b8c6c\\index"
+    };
+    files = new String[]{
+        "D:/doc/datas/testIndex-6201298"
+    };
+    SpecificSegmentsQuerySegmentWalker walker = new SpecificSegmentsQuerySegmentWalker(conglomerate);
+
+    for (int i = 0; i < files.length; i++) {
+      QueryableIndex index = TestHelper.getTestIndexIO().loadIndex(new File(files[i]));
+      return walker.add(
+          DataSegment.builder()
+              .dataSource(CalciteTests.DATASOURCE1)
+              .interval(index.getDataInterval())
+              .version("1")
+              .shardSpec(new LinearShardSpec(i))
+              .size(0)
+              .build(),
+          index
+      );
+    }
+
+    return walker;
   }
 
-  @After
-  public void tearDown() throws Exception {
-    walker.close();
-    walker = null;
+  @Test
+  public void testCount0() throws Exception {
+    String sql = "select count(1) cnt, APPROX_QUANTILE_HDR(hist_m1, 0.5, 1, 100, 2) from druid.foo where dim1 = 'aaa'";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testSqlQueryError() throws Exception {
+    String sql = "select min(__time) min_time,max(__time) max_time, HDR_HISTOGRAM(latency_ms_sketch) hdr from druid.foo";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testSqlDESCRIBE() throws Exception {
+    String sql = "select HDR_GET_QUANTILES(HDR_HISTOGRAM(m1, 1, 100, 2), 0, 0.25, 0.5, 0.75, 1) a, HDR_DESCRIBE(HDR_HISTOGRAM(m1, 1, 100, 2)) b, HDR_DESCRIBE(HDR_HISTOGRAM(hist_m1, 1, 100, 2)) c from druid.foo";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testSqlDESCRIBE2() throws Exception {
+    String sql = "select HDR_GET_QUANTILES(HDR_HISTOGRAM(m1, 1, 100, 2), 0, 0.25, 0.5, 0.75, 1) a, HDR_GET_PERCENTILES_DESCRIPTION(HDR_HISTOGRAM(m1, 1, 100, 2)) b, HDR_GET_PERCENTILES_DESCRIPTION(HDR_HISTOGRAM(hist_m1, 1, 100, 2)) c from druid.foo";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
   }
 
   @Test
   public void testSqlQuery() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
-    String sql = "select * from druid.foo";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    String[] columns = new String[]{"__time", "dim1", "dim2", "dim3", "cnt", "hist_m1", "m1"};
+    String sql = "select " + String.join(",", columns) + " from druid.foo";
+    QueryTestBuilder builder = testBuilder().sql(sql);
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      Map row = new LinkedHashMap();
+      for (int i = 0; i < result.length; i++) {
+        row.put(columns[i], result[i]);
+      }
+      System.out.println(JSON.toJSONString(row));
+      // System.out.println(Arrays.toString(result));
+    }
+
+    for (int i = 0; i < columns.length; i++) {
+      Object[] values = new Object[results.size()];
+      for (int j = 0; j < results.size(); j++) {
+        values[j] = results.get(j)[i];
+      }
+      System.out.println(columns[i] + ":" + Arrays.toString(values));
+    }
+  }
+
+  @Test
+  public void testSqlQuery3() throws Exception {
+    //cannotVectorize();
+    //String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)) from druid.foo where dim1 = ''";
+    String sql = "select HDR_HISTOGRAM(hist_m1) hdr from druid.foo ";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testSqlQuery4() throws Exception {
+    //cannotVectorize();
+    //String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)) from druid.foo where dim1 = ''";
+    String sql = "select APPROX_QUANTILE_HDR (hdr, 0.95) as p95th_tcp_latency_ms from (select HDR_HISTOGRAM(hist_m1) hdr from druid.foo) t ";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
   }
 
   @Test
-  public void testGroup() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
-    String sql = "select cnt, APPROX_QUANTILE_HDR(hist_m1, 0.5, 1, 100, 2) from druid.foo group by cnt";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+  public void testSqlQuery5() throws Exception {
+    //cannotVectorize();
+    //String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)) from druid.foo where dim1 = ''";
+    String sql = "select dim1, APPROX_QUANTILE_HDR (hdr, 0.95) as p95th_tcp_latency_ms from (select dim1, HDR_HISTOGRAM(hist_m1) hdr from druid.foo group by dim1) t group by dim1";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
   }
 
+  @Test
+  public void testSqlQuery6() throws Exception {
+    //cannotVectorize();
+    //String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)) from druid.foo where dim1 = ''";
+    //String sql = "select dim1, APPROX_QUANTILE_HDR (hdr, 0.95) as p95th_tcp_latency_ms from (select dim1, HDR_HISTOGRAM(hist_m1) hdr from druid.foo group by dim1 limit 10) t group by dim1";
+    String sql = "select dim1, HDR_GET_QUANTILES(HDR_HISTOGRAM(hdr), 0.95) as p95th_tcp_latency_ms from (select dim1, HDR_HISTOGRAM(hist_m1) hdr from druid.foo group by dim1 limit 10) t group by dim1";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testGroup() throws Exception {
+    String sql = "select cnt, APPROX_QUANTILE_HDR(hist_m1, 0.5, 1, 100, 2) from druid.foo group by cnt";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
   @Test
   public void testGroup2() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
     String sql = "select HDR_HISTOGRAM(hist_m1) from druid.foo";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testGroup3() throws Exception {
+    String sql = "select APPROX_QUANTILE_HDR(h, 0.5) from(select HDR_HISTOGRAM(hist_m1) h from druid.foo) t";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testGroup4() throws Exception {
+    String sql = "select hdr_get_quantiles(h, 0.1, 0.2, 0.3, 0.5, 0.9, 0.99, 1) from(select HDR_HISTOGRAM(hist_m1) h from druid.foo) t";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
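
Every migrated test above repeats the same build-run-print sequence; a hypothetical helper (not part of this change) would shrink each test to one call. testBuilder() is inherited from BaseCalciteQueryTest, and QueryTestBuilder/QueryTestRunner come from the druid-sql test jar added in the pom changes:

    // hypothetical consolidation of the repeated pattern; sketch only
    private List<Object[]> runAndPrint(String sql) {
      QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
      builder.run();
      QueryTestRunner.QueryResults queryResults = builder.results();
      for (Object[] row : queryResults.results) {
        System.out.println(Arrays.toString(row));
      }
      return queryResults.results;
    }
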
@@ -258,10 +375,11 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
 
   @Test
   public void testSqlQueryGeneHdr() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
     String sql = "select HDR_HISTOGRAM(hist_m1, 1, 100, 2), HDR_HISTOGRAM(cnt, 1, 100, 2) from druid.foo";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
@@ -269,11 +387,12 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
 
   @Test
   public void testSqlQueryGeneHdr2() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
     // HDR_HISTOGRAM(hist_m1, 1, 100, 2),
     String sql = "select HDR_GET_QUANTILES(HDR_HISTOGRAM(m1, 1, 100, 2), 0.1, 0.2, 0.3, 0.5, 0.9, 1) from druid.foo";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
@@ -281,44 +400,47 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
 
   @Test
   public void testSqlQueryGeneHdrArgs() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
     String sql = "select HDR_GET_QUANTILEs(HDR_HISTOGRAM(m1), 0.1, 0.2, 0.3, 0.5, 0.9, 1), "
         + "HDR_GET_QUANTILEs(HDR_HISTOGRAM(m1, 2), 0.1, 0.2, 0.3, 0.5, 0.9, 1) ,\n"
         + "HDR_GET_QUANTILEs(HDR_HISTOGRAM(m1, 1, 110, 2), 0.1, 0.2, 0.3, 0.5, 0.9, 1) ,\n"
         + "HDR_GET_QUANTILEs(HDR_HISTOGRAM(m1, 1, 110, 2, false), 0.1, 0.2, 0.3, 0.5, 0.9, 1) \n"
         + "from druid.foo";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
   }
 
   @Test
   public void testSqlQueryGeneHdrArgs2() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
     String sql = "select APPROX_QUANTILE_HDR(m1, 0.1), "
         + "APPROX_QUANTILE_HDR(m1, 0.1, 2) ,\n"
         + "APPROX_QUANTILE_HDR(m1, 0.1, 1, 110, 2) ,\n"
         + "APPROX_QUANTILE_HDR(m1, 0.1, 1, 110, 2, false)\n"
         + "from druid.foo";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
   }
 
   @Test
   public void testSqlQueryGeneHdr3() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
     // function names are case-insensitive
     // HDR_HISTOGRAM(hist_m1, 1, 100, 2),
     //String sql = "select HDR_GET_PERCENTILES(HDR_HISTOGRAM(m1, 1, 100, 2)) from druid.foo";
     //String sql = "select hdr_get_percentiles(hdr_histogram(m1, 1, 100, 2)) from druid.foo";
     String sql = "select hdr_get_percentiles(hdr_histogram(hist_m1, 1, 100, 2)) from druid.foo";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
@@ -326,7 +448,6 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
 
   @Test
   public void testSqlQueryQuantiles() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
     String sql = "SELECT\n"
         + "APPROX_QUANTILE_HDR(m1, 0.01, 1, 100, 2),\n"
         + "APPROX_QUANTILE_HDR(m1, 0.5, 1, 100, 2),\n"
@@ -338,9 +459,10 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
         + "APPROX_QUANTILE_HDR(m1, 0.999, 1, 100, 2) FILTER(WHERE dim1 = 'abc'),\n"
         + "APPROX_QUANTILE_HDR(cnt, 0.5, 1, 100, 2)\n"
         + "FROM foo";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
-    System.out.println(sql);
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
@@ -348,7 +470,6 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
 
   @Test
   public void testSqlQueryQuantilesOnComplexColumn() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
     String sql = "SELECT\n"
         + "APPROX_QUANTILE_HDR(hist_m1, 0.01, 1, 100, 2),\n"
         + "APPROX_QUANTILE_HDR(hist_m1, 0.5, 1, 100, 2),\n"
@@ -358,9 +479,10 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
         + "APPROX_QUANTILE_HDR(hist_m1, 0.999, 1, 100, 2) FILTER(WHERE dim1 <> 'abc'),\n"
         + "APPROX_QUANTILE_HDR(hist_m1, 0.999, 1, 100, 2) FILTER(WHERE dim1 = 'abc')\n"
         + "FROM foo";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
-    System.out.println(sql);
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
@@ -373,7 +495,6 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
 
   @Test
   public void testQuantileOnFloatAndLongs() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
     String sql = "SELECT\n"
         + "APPROX_QUANTILE_HDR(m1, 0.01, 1, 100, 2),\n"
         + "APPROX_QUANTILE_HDR(m1, 0.5, 1, 100, 2),\n"
@@ -385,60 +506,55 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
         + "APPROX_QUANTILE_HDR(m1, 0.999, 1, 100, 2) FILTER(WHERE dim1 = 'abc'),\n"
         + "APPROX_QUANTILE_HDR(cnt, 0.5, 1, 100, 2)\n"
         + "FROM foo";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
-    System.out.println(sql);
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder = builder.expectedQueries(Collections.singletonList(Druids.newTimeseriesQueryBuilder()
+        .dataSource(CalciteTests.DATASOURCE1)
+        .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
+        .granularity(Granularities.ALL)
+        .virtualColumns(
+            new ExpressionVirtualColumn(
+                "v0",
+                "(\"m1\" * 2)",
+                ColumnType.LONG,
+                TestExprMacroTable.INSTANCE
+            )
+        )
+        .aggregators(ImmutableList.of(
+            new HdrHistogramAggregatorFactory("a0:agg", "m1", 1L, 100L, 2, true),
+            new HdrHistogramAggregatorFactory("a4:agg", "v0", 1L, 100L, 2, true),
+            new FilteredAggregatorFactory(
+                new HdrHistogramAggregatorFactory("a5:agg", "m1", 1L, 100L, 2, true),
+                new SelectorDimFilter("dim1", "abc", null)
+            ),
+            new FilteredAggregatorFactory(
+                new HdrHistogramAggregatorFactory("a6:agg", "m1", 1L, 100L, 2, true),
+                new NotDimFilter(new SelectorDimFilter("dim1", "abc", null))
+            ),
+            new HdrHistogramAggregatorFactory("a8:agg", "cnt", 1L, 100L, 2, true)
+        ))
+        .postAggregators(
+            new HdrHistogramToQuantilePostAggregator("a0", "a0:agg", 0.01f),
+            new HdrHistogramToQuantilePostAggregator("a1", "a0:agg", 0.50f),
+            new HdrHistogramToQuantilePostAggregator("a2", "a0:agg", 0.98f),
+            new HdrHistogramToQuantilePostAggregator("a3", "a0:agg", 0.99f),
+            new HdrHistogramToQuantilePostAggregator("a4", "a4:agg", 0.97f),
+            new HdrHistogramToQuantilePostAggregator("a5", "a5:agg", 0.99f),
+            new HdrHistogramToQuantilePostAggregator("a6", "a6:agg", 0.999f),
+            new HdrHistogramToQuantilePostAggregator("a7", "a5:agg", 0.999f),
+            new HdrHistogramToQuantilePostAggregator("a8", "a8:agg", 0.50f)
+        )
+        .context(QUERY_CONTEXT_DEFAULT)
+        .build()));
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
-
-    // Verify query
-    Assert.assertEquals(
-        Druids.newTimeseriesQueryBuilder()
-            .dataSource(CalciteTests.DATASOURCE1)
-            .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
-            .granularity(Granularities.ALL)
-            .virtualColumns(
-                new ExpressionVirtualColumn(
-                    "v0",
-                    "(\"m1\" * 2)",
-                    ValueType.LONG,
-                    TestExprMacroTable.INSTANCE
-                )
-            )
-            .aggregators(ImmutableList.of(
-                new HdrHistogramAggregatorFactory("a0:agg", "m1", 1L, 100L, 2, true),
-                new HdrHistogramAggregatorFactory("a4:agg", "v0", 1L, 100L, 2, true),
-                new FilteredAggregatorFactory(
-                    new HdrHistogramAggregatorFactory("a5:agg", "m1", 1L, 100L, 2, true),
-                    new SelectorDimFilter("dim1", "abc", null)
-                ),
-                new FilteredAggregatorFactory(
-                    new HdrHistogramAggregatorFactory("a6:agg", "m1", 1L, 100L, 2, true),
-                    new NotDimFilter(new SelectorDimFilter("dim1", "abc", null))
-                ),
-                new HdrHistogramAggregatorFactory("a8:agg", "cnt", 1L, 100L, 2, true)
-            ))
-            .postAggregators(
-                new HdrHistogramToQuantilePostAggregator("a0", "a0:agg", 0.01f),
-                new HdrHistogramToQuantilePostAggregator("a1", "a0:agg", 0.50f),
-                new HdrHistogramToQuantilePostAggregator("a2", "a0:agg", 0.98f),
-                new HdrHistogramToQuantilePostAggregator("a3", "a0:agg", 0.99f),
-                new HdrHistogramToQuantilePostAggregator("a4", "a4:agg", 0.97f),
-                new HdrHistogramToQuantilePostAggregator("a5", "a5:agg", 0.99f),
-                new HdrHistogramToQuantilePostAggregator("a6", "a6:agg", 0.999f),
-                new HdrHistogramToQuantilePostAggregator("a7", "a5:agg", 0.999f),
-                new HdrHistogramToQuantilePostAggregator("a8", "a8:agg", 0.50f)
-            )
-            .context(ImmutableMap.of("skipEmptyBuckets", true, PlannerContext.CTX_SQL_QUERY_ID, "dummy"))
-            .build(),
-        Iterables.getOnlyElement(queryLogHook.getRecordedQueries())
-    );
   }
 
   @Test
   public void testQuantileOnComplexColumn() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
     String sql = "SELECT\n"
         + "APPROX_QUANTILE_HDR(hist_m1, 0.01, 1, 100, 2),\n"
         + "APPROX_QUANTILE_HDR(hist_m1, 0.5, 1, 100, 2),\n"
@@ -448,43 +564,42 @@ public class HdrHistogramQuantileSqlAggregatorTest extends CalciteTestBase {
         + "APPROX_QUANTILE_HDR(hist_m1, 0.999, 1, 100, 2) FILTER(WHERE dim1 <> 'abc'),\n"
         + "APPROX_QUANTILE_HDR(hist_m1, 0.999, 1, 100, 2) FILTER(WHERE dim1 = 'abc')\n"
         + "FROM foo";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
-    System.out.println(sql);
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder = builder.expectedQueries(Collections.singletonList(Druids.newTimeseriesQueryBuilder()
+        .dataSource(CalciteTests.DATASOURCE1)
+        .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
+        .granularity(Granularities.ALL)
+        .aggregators(ImmutableList.of(
+            new HdrHistogramMergeAggregatorFactory("a0:agg", "hist_m1", 1L, 100L, 2, true),
+            new FilteredAggregatorFactory(
+                new HdrHistogramMergeAggregatorFactory("a4:agg", "hist_m1", 1L, 100L, 2, true),
+                new SelectorDimFilter("dim1", "abc", null)
+            ),
+            new FilteredAggregatorFactory(
+                new HdrHistogramMergeAggregatorFactory("a5:agg", "hist_m1", 1L, 100L, 2, true),
+                new NotDimFilter(new SelectorDimFilter("dim1", "abc", null))
+            )
+        ))
+        .postAggregators(
+            new HdrHistogramToQuantilePostAggregator("a0", "a0:agg", 0.01f),
+            new HdrHistogramToQuantilePostAggregator("a1", "a0:agg", 0.50f),
+            new HdrHistogramToQuantilePostAggregator("a2", "a0:agg", 0.98f),
+            new HdrHistogramToQuantilePostAggregator("a3", "a0:agg", 0.99f),
+            new HdrHistogramToQuantilePostAggregator("a4", "a4:agg", 0.99f),
+            new HdrHistogramToQuantilePostAggregator("a5", "a5:agg", 0.999f),
+            new HdrHistogramToQuantilePostAggregator("a6", "a4:agg", 0.999f)
+        )
+        .context(QUERY_CONTEXT_DEFAULT)
+        .build()));
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
-
-    // Verify query
-    Assert.assertEquals(
-        Druids.newTimeseriesQueryBuilder()
-            .dataSource(CalciteTests.DATASOURCE1)
-            .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
-            .granularity(Granularities.ALL)
-            .aggregators(ImmutableList.of(
-                new HdrHistogramMergeAggregatorFactory("a0:agg", "hist_m1", 1L, 100L, 2, true),
-                new FilteredAggregatorFactory(
-                    new HdrHistogramMergeAggregatorFactory("a4:agg", "hist_m1", 1L, 100L, 2, true),
-                    new SelectorDimFilter("dim1", "abc", null)
-                ),
-                new FilteredAggregatorFactory(
-                    new HdrHistogramMergeAggregatorFactory("a5:agg", "hist_m1", 1L, 100L, 2, true),
-                    new NotDimFilter(new SelectorDimFilter("dim1", "abc", null))
-                )
-            ))
-            .postAggregators(
-                new HdrHistogramToQuantilePostAggregator("a0", "a0:agg", 0.01f),
-                new HdrHistogramToQuantilePostAggregator("a1", "a0:agg", 0.50f),
-                new HdrHistogramToQuantilePostAggregator("a2", "a0:agg", 0.98f),
-                new HdrHistogramToQuantilePostAggregator("a3", "a0:agg", 0.99f),
-                new HdrHistogramToQuantilePostAggregator("a4", "a4:agg", 0.99f),
-                new HdrHistogramToQuantilePostAggregator("a5", "a5:agg", 0.999f),
-                new HdrHistogramToQuantilePostAggregator("a6", "a4:agg", 0.999f)
-            )
-            .context(ImmutableMap.of("skipEmptyBuckets", true, PlannerContext.CTX_SQL_QUERY_ID, "dummy"))
-            .build(),
-        Iterables.getOnlyElement(queryLogHook.getRecordedQueries())
-    );
   }
 
   private static PostAggregator makeFieldAccessPostAgg(String name) {
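
The two tests above also show where the native-query assertion moved: the 0.18 code compared queryLogHook.getRecordedQueries() against a hand-built query with Assert.assertEquals, while in 26.0 the expectation is registered on the builder before run(). A hedged sketch, with expectedTimeseriesQuery standing in for a pre-built query object:

    testBuilder()
        .sql(sql)
        .skipVectorize()
        .expectedQueries(Collections.singletonList(expectedTimeseriesQuery))  // hypothetical pre-built query
        .run();  // run() is assumed to fail if the planned native query differs from the expectation
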
@@ -5,7 +5,7 @@
   <modelVersion>4.0.0</modelVersion>
 
   <groupId>org.apache.druid.extensions</groupId>
-  <artifactId>druid-hlld_0.18.1</artifactId>
+  <artifactId>druid-hlld_26.0.0</artifactId>
   <name>druid-hlld</name>
   <version>1.0-SNAPSHOT</version>
 
@@ -14,7 +14,7 @@
     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
     <maven.compiler.source>1.8</maven.compiler.source>
     <maven.compiler.target>1.8</maven.compiler.target>
-    <druid.version>0.18.1</druid.version>
+    <druid.version>26.0.0</druid.version>
   </properties>
 
   <dependencies>
@@ -33,6 +33,14 @@
     </dependency>
 
     <!-- Tests -->
+    <dependency>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <version>4.3</version>
+      <scope>test</scope>
+    </dependency>
+
     <dependency>
       <groupId>org.apache.druid</groupId>
       <artifactId>druid-processing</artifactId>
@@ -42,9 +50,17 @@
     </dependency>
     <dependency>
       <groupId>org.apache.druid</groupId>
-      <artifactId>druid-benchmarks</artifactId>
+      <artifactId>druid-server</artifactId>
       <version>${druid.version}</version>
      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.druid</groupId>
+      <artifactId>druid-sql</artifactId>
+      <version>${druid.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
@@ -9,6 +9,7 @@ import org.apache.druid.query.aggregation.*;
 import org.apache.druid.query.cache.CacheKeyBuilder;
 import org.apache.druid.segment.ColumnSelectorFactory;
 import org.apache.druid.segment.ColumnValueSelector;
+import org.apache.druid.segment.column.ColumnType;

 import javax.annotation.Nullable;
 import java.util.Collections;
@@ -27,6 +28,7 @@ public class HllAggregatorFactory extends AggregatorFactory {
   protected final String fieldName;
   protected final int precision;
   protected final boolean round;
+  protected final int updatableSerializationBytes;

   public HllAggregatorFactory(
       @JsonProperty("name") final String name,
@@ -44,6 +46,7 @@ public class HllAggregatorFactory extends AggregatorFactory {
     this.fieldName = fieldName;
     this.precision = precision == null ? DEFAULT_PRECISION : precision;
     this.round = round == null ? DEFAULT_ROUND : round;
+    this.updatableSerializationBytes = getUpdatableSerializationBytes();
   }

   @Override
@@ -145,9 +148,9 @@ public class HllAggregatorFactory extends AggregatorFactory {
           Math.max(precision, castedOther.precision),
           round || castedOther.round
       );
-    } else {
-      throw new AggregatorFactoryNotMergeableException(this, other);
     }
+
+    throw new AggregatorFactoryNotMergeableException(this, other);
   }

   @Override
@@ -157,25 +160,42 @@ public class HllAggregatorFactory extends AggregatorFactory {
     );
   }

+  @Override
+  public AggregatorFactory withName(String newName) {
+    return new HllAggregatorFactory(newName, fieldName, precision, round);
+  }
+
   @Override
   public Object deserialize(Object object) {
+    if (object == null) {
+      return null;
+    }
     return HllUtils.deserializeHll(object);
   }

+  @Override
+  public ColumnType getResultType() {
+    //return round ? ColumnType.LONG : ColumnType.DOUBLE;
+    return getIntermediateType();
+  }
+
   @Nullable
   @Override
   public Object finalizeComputation(@Nullable Object object) {
     if (object == null) {
       return null;
     }
-    final Hll hll = (Hll) object;
+    return object;
+
+    /*final Hll hll = (Hll) object;
     final double estimate = hll.size();

     if (round) {
       return Math.round(estimate);
     } else {
       return estimate;
-    }
+    }*/
   }

   @Override
@@ -199,9 +219,16 @@ public class HllAggregatorFactory extends AggregatorFactory {
     return round;
   }

+  /*
+  This method is gone; the new version requires implementing getIntermediateType() instead.
   @Override
   public String getTypeName() {
     return HllModule.HLLD_BUILD_TYPE_NAME;
+  }*/
+
+  @Override
+  public ColumnType getIntermediateType() {
+    return HllModule.BUILD_TYPE;
   }

   @Override
@@ -211,6 +238,10 @@ public class HllAggregatorFactory extends AggregatorFactory {

   @Override
   public int getMaxIntermediateSize() {
+    return updatableSerializationBytes == 0 ? getUpdatableSerializationBytes() : updatableSerializationBytes;
+  }
+
+  protected int getUpdatableSerializationBytes() {
     return Hll.getUpdatableSerializationBytes(precision);
   }
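The getMaxIntermediateSize() change above caches the per-row buffer size once in the constructor instead of recomputing it on every call, with a zero-check fallback. A minimal standalone sketch of the same pattern follows; the class names and size formula are illustrative stand-ins, not the extension's real code:

// Hypothetical miniature of the factory hierarchy above; only the caching
// pattern matters. The size formula is a placeholder, not HLL's real one.
class BaseFactory {
    protected final int precision;
    protected final int updatableSerializationBytes;

    BaseFactory(int precision) {
        this.precision = precision;
        // Java virtual dispatch: if a subclass overrides
        // getUpdatableSerializationBytes(), that override runs here, so the
        // merge factory's larger union buffer size is what gets cached.
        this.updatableSerializationBytes = getUpdatableSerializationBytes();
    }

    protected int getUpdatableSerializationBytes() {
        return (1 << precision) + 16; // placeholder: registers plus a header
    }

    public int getMaxIntermediateSize() {
        // Fall back to recomputing if the cached value was never set.
        return updatableSerializationBytes == 0 ? getUpdatableSerializationBytes() : updatableSerializationBytes;
    }
}

class MergeFactory extends BaseFactory {
    MergeFactory(int precision) {
        super(precision);
    }

    @Override
    protected int getUpdatableSerializationBytes() {
        return super.getUpdatableSerializationBytes() * 2; // placeholder for the union's larger buffer
    }
}

The override in the subclass is safe here only because it reads `precision`, which the base constructor assigns before the virtual call; an override touching subclass fields would observe them uninitialized.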
@@ -4,10 +4,12 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import com.zdjz.galaxy.sketch.hlld.Hll;
 import com.zdjz.galaxy.sketch.hlld.HllUnion;
 import org.apache.druid.query.aggregation.Aggregator;
+import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.BufferAggregator;
 import org.apache.druid.query.cache.CacheKeyBuilder;
 import org.apache.druid.segment.ColumnSelectorFactory;
 import org.apache.druid.segment.ColumnValueSelector;
+import org.apache.druid.segment.column.ColumnType;

 import javax.annotation.Nullable;

@@ -21,9 +23,16 @@ public class HllMergeAggregatorFactory extends HllAggregatorFactory{
     super(name, fieldName, precision, round);
   }

+  /*
+  This method is gone; the new version requires implementing getIntermediateType() instead.
   @Override
   public String getTypeName(){
     return HllModule.HLLD_TYPE_NAME;
+  }*/
+
+  @Override
+  public ColumnType getIntermediateType() {
+    return HllModule.TYPE;
   }

   @Override
@@ -44,6 +53,11 @@ public class HllMergeAggregatorFactory extends HllAggregatorFactory{
     );
   }

+  @Override
+  public AggregatorFactory withName(String newName) {
+    return new HllMergeAggregatorFactory(newName, fieldName, precision, round);
+  }
+
   @Override
   public byte[] getCacheKey() {
     return new CacheKeyBuilder(HllModule.CACHE_TYPE_ID_OFFSET).appendByte(HllModule.HLLD_MERGE_CACHE_TYPE_ID)
@@ -53,7 +67,7 @@ public class HllMergeAggregatorFactory extends HllAggregatorFactory{
   }

   @Override
-  public int getMaxIntermediateSize() {
+  protected int getUpdatableSerializationBytes() {
     return HllUnion.getUpdatableSerializationBytes(precision);
   }
 }
@@ -10,6 +10,7 @@ import org.apache.druid.initialization.DruidModule;
 import org.apache.druid.query.aggregation.sketch.hlld.sql.HllApproxCountDistinctSqlAggregator;
 import org.apache.druid.query.aggregation.sketch.hlld.sql.HllEstimateOperatorConversion;
 import org.apache.druid.query.aggregation.sketch.hlld.sql.HllObjectSqlAggregator;
+import org.apache.druid.segment.column.ColumnType;
 import org.apache.druid.segment.serde.ComplexMetrics;
 import org.apache.druid.sql.guice.SqlBindings;

@@ -24,6 +25,9 @@ public class HllModule implements DruidModule {

   public static final String HLLD_TYPE_NAME = "HLLDSketch";
   public static final String HLLD_BUILD_TYPE_NAME = "HLLDSketchBuild";
+  public static final ColumnType TYPE = ColumnType.ofComplex(HLLD_TYPE_NAME);
+  public static final ColumnType BUILD_TYPE = ColumnType.ofComplex(HLLD_BUILD_TYPE_NAME);
+

   @Override
   public void configure(Binder binder) {
@@ -7,6 +7,8 @@ import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.PostAggregator;
 import org.apache.druid.query.aggregation.post.ArithmeticPostAggregator;
 import org.apache.druid.query.cache.CacheKeyBuilder;
+import org.apache.druid.segment.ColumnInspector;
+import org.apache.druid.segment.column.ColumnType;

 import java.util.Comparator;
 import java.util.Map;
@@ -29,6 +31,12 @@ public class HllToEstimatePostAggregator implements PostAggregator {
     this.round = round;
   }

+  // Required by the new Druid version.
+  @Override
+  public ColumnType getType(ColumnInspector signature) {
+    return round ? ColumnType.LONG : ColumnType.DOUBLE;
+  }
+
   @Override
   @JsonProperty
   public String getName() {
@@ -58,6 +66,9 @@ public class HllToEstimatePostAggregator implements PostAggregator {
   @Override
   public Object compute(final Map<String, Object> combinedAggregators) {
     final Hll sketch = (Hll) field.compute(combinedAggregators);
+    if (sketch == null) {
+      return round ? 0L : 0D;
+    }
     return round ? Math.round(sketch.size()) : sketch.size();
   }
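The compute() change above makes the estimate null-safe: an empty input yields a typed zero instead of a NullPointerException. A small self-contained sketch of that logic, with a plain Double standing in for the sketch's size():

// Illustrative only: `estimate` stands in for Hll.size(). Note the explicit
// casts to Object: a bare `round ? 0L : 0D` undergoes binary numeric
// promotion (long and double promote to double), so it would box 0.0 even
// on the rounding branch; casting each arm to Object preserves the intended
// Long-vs-Double distinction.
static Object toEstimate(Double estimate, boolean round) {
    if (estimate == null) {
        return round ? (Object) 0L : (Object) 0D;
    }
    return round ? (Object) Math.round(estimate) : (Object) estimate;
}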
@@ -5,36 +5,44 @@ import org.apache.calcite.sql.SqlFunctionCategory;
 import org.apache.calcite.sql.SqlKind;
 import org.apache.calcite.sql.type.*;
 import org.apache.druid.query.aggregation.AggregatorFactory;
+import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
 import org.apache.druid.query.aggregation.post.FinalizingFieldAccessPostAggregator;
-import org.apache.druid.segment.VirtualColumn;
+import org.apache.druid.query.aggregation.sketch.hlld.HllAggregatorFactory;
+import org.apache.druid.query.aggregation.sketch.hlld.HllToEstimatePostAggregator;
 import org.apache.druid.sql.calcite.aggregation.Aggregation;

 import java.util.Collections;
-import java.util.List;

 public class HllApproxCountDistinctSqlAggregator extends HllBaseSqlAggregator {
   private static final SqlAggFunction FUNCTION_INSTANCE = new CPCSketchApproxCountDistinctSqlAggFunction();
   private static final String NAME = "APPROX_COUNT_DISTINCT_HLLD";

+  public HllApproxCountDistinctSqlAggregator() {
+    super(true);
+  }
+
   @Override
   public SqlAggFunction calciteFunction() {
     return FUNCTION_INSTANCE;
   }

+  // The new version's signature drops the virtualColumns parameter.
   @Override
   protected Aggregation toAggregation(
       String name,
       boolean finalizeAggregations,
-      List<VirtualColumn> virtualColumns,
       AggregatorFactory aggregatorFactory
   ) {
     return Aggregation.create(
-        virtualColumns,
         Collections.singletonList(aggregatorFactory),
         // Presumably flags whether this is the outermost function.
-        finalizeAggregations ? new FinalizingFieldAccessPostAggregator(
+        finalizeAggregations ? new HllToEstimatePostAggregator(
             name,
-            aggregatorFactory.getName()
+            new FieldAccessPostAggregator(
+                aggregatorFactory.getName(),
+                aggregatorFactory.getName()
+            ),
+            ((HllAggregatorFactory) aggregatorFactory).isRound()
         ) : null
     );
   }
@@ -2,6 +2,7 @@ package org.apache.druid.query.aggregation.sketch.hlld.sql;

 import org.apache.calcite.rel.core.AggregateCall;
 import org.apache.calcite.rel.core.Project;
+import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rex.RexBuilder;
 import org.apache.calcite.rex.RexLiteral;
 import org.apache.calcite.rex.RexNode;
@@ -14,6 +15,7 @@ import org.apache.druid.query.aggregation.sketch.hlld.HllMergeAggregatorFactory;
 import org.apache.druid.query.dimension.DefaultDimensionSpec;
 import org.apache.druid.query.dimension.DimensionSpec;
 import org.apache.druid.segment.VirtualColumn;
+import org.apache.druid.segment.column.ColumnType;
 import org.apache.druid.segment.column.RowSignature;
 import org.apache.druid.segment.column.ValueType;
 import org.apache.druid.sql.calcite.aggregation.Aggregation;
@@ -29,6 +31,13 @@ import java.util.ArrayList;
 import java.util.List;

 public abstract class HllBaseSqlAggregator implements SqlAggregator {

+  private final boolean finalizeSketch;
+
+  protected HllBaseSqlAggregator(boolean finalizeSketch) {
+    this.finalizeSketch = finalizeSketch;
+  }
+
   @Nullable
   @Override
   public Aggregation toDruidAggregation(
@@ -93,13 +102,14 @@ public abstract class HllBaseSqlAggregator implements SqlAggregator {
       round = HllAggregatorFactory.DEFAULT_ROUND;
     }

-    final List<VirtualColumn> virtualColumns = new ArrayList<>();
+    // Removed in the new version: final List<VirtualColumn> virtualColumns = new ArrayList<>();
     final AggregatorFactory aggregatorFactory;
-    final String aggregatorName = finalizeAggregations ? Calcites.makePrefixedName(name, "a") : name;
+    //final String aggregatorName = finalizeAggregations ? Calcites.makePrefixedName(name, "a") : name;
+    final String aggregatorName = finalizeSketch ? Calcites.makePrefixedName(name, "a") : name;

-    // Input is a Cpc; return HllMergeAggregatorFactory
+    // Input is an Hll; return HllSketchMergeAggregatorFactory
     if (columnArg.isDirectColumnAccess()
-        && rowSignature.getColumnType(columnArg.getDirectColumn()).orElse(null) == ValueType.COMPLEX) {
+        && rowSignature.getColumnType(columnArg.getDirectColumn()).map(type -> type.is(ValueType.COMPLEX)).orElse(false)) {
       // This is the concrete aggregation function.
       aggregatorFactory = new HllMergeAggregatorFactory(
           aggregatorName,
@@ -109,10 +119,10 @@ public abstract class HllBaseSqlAggregator implements SqlAggregator {
       );
     } else {
       // Input is a regular column: HllBuildAggregatorFactory
-      final SqlTypeName sqlTypeName = columnRexNode.getType().getSqlTypeName();
-      final ValueType inputType = Calcites.getValueTypeForSqlTypeName(sqlTypeName);
+      final RelDataType dataType = columnRexNode.getType();
+      final ColumnType inputType = Calcites.getColumnTypeForRelDataType(dataType);
       if (inputType == null) {
-        throw new ISE("Cannot translate sqlTypeName[%s] to Druid type for field[%s]", sqlTypeName, aggregatorName);
+        throw new ISE("Cannot translate sqlTypeName[%s] to Druid type for field[%s]", dataType.getSqlTypeName(), aggregatorName);
       }

       final DimensionSpec dimensionSpec;
@@ -120,27 +130,34 @@ public abstract class HllBaseSqlAggregator implements SqlAggregator {
       if (columnArg.isDirectColumnAccess()) {
         dimensionSpec = columnArg.getSimpleExtraction().toDimensionSpec(null, inputType);
       } else {
-        VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
-            plannerContext,
+        String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
             columnArg,
-            sqlTypeName
+            dataType
         );
-        dimensionSpec = new DefaultDimensionSpec(virtualColumn.getOutputName(), null, inputType);
-        virtualColumns.add(virtualColumn);
+        dimensionSpec = new DefaultDimensionSpec(virtualColumnName, null, inputType);
       }

-      aggregatorFactory = new HllAggregatorFactory(
-          aggregatorName,
-          dimensionSpec.getDimension(),
-          precision,
-          round
-      );
+      // New-version check: the input is an Hll.
+      if (inputType.is(ValueType.COMPLEX)) {
+        aggregatorFactory = new HllMergeAggregatorFactory(
+            aggregatorName,
+            dimensionSpec.getOutputName(),
+            precision,
+            round
+        );
+      } else {
+        aggregatorFactory = new HllAggregatorFactory(
+            aggregatorName,
+            dimensionSpec.getDimension(),
+            precision,
+            round
+        );
+      }
     }

     return toAggregation(
         name,
-        finalizeAggregations,
-        virtualColumns,
+        finalizeSketch,
         aggregatorFactory
     );
   }
@@ -148,7 +165,6 @@ public abstract class HllBaseSqlAggregator implements SqlAggregator {
   protected abstract Aggregation toAggregation(
       String name,
       boolean finalizeAggregations,
-      List<VirtualColumn> virtualColumns,
       AggregatorFactory aggregatorFactory
   );
 }
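The direct-column check above changed because getColumnType() now yields an Optional of a richer column type rather than a bare ValueType, so the equality test moves into map(...).orElse(false). The same Optional shape in miniature, as plain runnable Java with stand-in types rather than Druid's API:

import java.util.Optional;

class ColumnTypeCheck {
    // Stand-in for rowSignature.getColumnType(column): empty when unknown.
    static Optional<String> getColumnType(String column) {
        return "hll_dim1".equals(column) ? Optional.of("COMPLEX<HLLDSketch>") : Optional.empty();
    }

    static boolean isComplex(String column) {
        // Same shape as: getColumnType(col).map(t -> t.is(ValueType.COMPLEX)).orElse(false)
        return getColumnType(column).map(t -> t.startsWith("COMPLEX")).orElse(false);
    }

    public static void main(String[] args) {
        System.out.println(isComplex("hll_dim1")); // true
        System.out.println(isComplex("dim1"));     // false: an unknown column maps to false, never null
    }
}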
@@ -13,16 +13,15 @@ import org.apache.druid.query.aggregation.PostAggregator;
 import org.apache.druid.query.aggregation.sketch.hlld.HllAggregatorFactory;
 import org.apache.druid.query.aggregation.sketch.hlld.HllToEstimatePostAggregator;
 import org.apache.druid.segment.column.RowSignature;
-import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
-import org.apache.druid.sql.calcite.expression.DruidExpression;
-import org.apache.druid.sql.calcite.expression.OperatorConversions;
-import org.apache.druid.sql.calcite.expression.PostAggregatorVisitor;
+import org.apache.druid.sql.calcite.expression.*;
 import org.apache.druid.sql.calcite.planner.PlannerContext;

 import javax.annotation.Nullable;
 import java.util.List;

-public class HllEstimateOperatorConversion extends DirectOperatorConversion {
+// As a post-aggregator, toDruidExpression returns null; post UDFs behave differently from ordinary UDFs.
+// The new version changes the superclass outright.
+public class HllEstimateOperatorConversion implements SqlOperatorConversion {
   private static final String FUNCTION_NAME = "HLLD_ESTIMATE";
   private static final SqlFunction SQL_FUNCTION = OperatorConversions
       .operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
@@ -32,9 +31,7 @@ public class HllEstimateOperatorConversion extends DirectOperatorConversion {
       .returnTypeInference(ReturnTypes.DOUBLE)
       .build();

-  public HllEstimateOperatorConversion() {
-    super(SQL_FUNCTION, FUNCTION_NAME);
-  }
+  // The new version drops the constructor.

   @Override
   public SqlOperator calciteOperator() {
@@ -63,7 +60,8 @@ public class HllEstimateOperatorConversion extends DirectOperatorConversion {
         plannerContext,
         rowSignature,
         operands.get(0),
-        postAggregatorVisitor
+        postAggregatorVisitor,
+        true // extra parameter in the new version
     );

     if (firstOperand == null) {
@@ -5,16 +5,18 @@ import org.apache.calcite.sql.SqlFunctionCategory;
 import org.apache.calcite.sql.SqlKind;
 import org.apache.calcite.sql.type.*;
 import org.apache.druid.query.aggregation.AggregatorFactory;
-import org.apache.druid.segment.VirtualColumn;
 import org.apache.druid.sql.calcite.aggregation.Aggregation;

 import java.util.Collections;
-import java.util.List;

 public class HllObjectSqlAggregator extends HllBaseSqlAggregator {
   private static final SqlAggFunction FUNCTION_INSTANCE = new CpcSketchSqlAggFunction();
   private static final String NAME = "HLLD";

+  public HllObjectSqlAggregator() {
+    super(false);
+  }
+
   @Override
   public SqlAggFunction calciteFunction() {
     return FUNCTION_INSTANCE;
@@ -24,11 +26,9 @@ public class HllObjectSqlAggregator extends HllBaseSqlAggregator {
   protected Aggregation toAggregation(
       String name,
       boolean finalizeAggregations,
-      List<VirtualColumn> virtualColumns,
       AggregatorFactory aggregatorFactory
   ) {
     return Aggregation.create(
-        virtualColumns,
         Collections.singletonList(aggregatorFactory),
         null
     );
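After this change the two SQL aggregators differ only in the flag they pass to the shared base class: APPROX_COUNT_DISTINCT_HLLD finalizes the sketch to a count, while HLLD hands back the sketch object itself. A compact sketch of that wiring, using hypothetical stand-in types rather than the real Druid classes:

// Hypothetical miniature of HllBaseSqlAggregator's constructor flag.
abstract class BaseSqlAgg {
    private final boolean finalizeSketch;

    protected BaseSqlAgg(boolean finalizeSketch) {
        this.finalizeSketch = finalizeSketch;
    }

    // Drives both the aggregator's internal name prefix and whether an
    // estimate post-aggregator wraps the raw sketch column.
    final boolean finalizesToNumber() {
        return finalizeSketch;
    }
}

final class ApproxCountDistinctAgg extends BaseSqlAgg {
    ApproxCountDistinctAgg() { super(true); }  // SQL result: a number
}

final class SketchObjectAgg extends BaseSqlAgg {
    SketchObjectAgg() { super(false); }        // SQL result: the sketch itself
}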
@@ -1,83 +1,64 @@
 package org.apache.druid.query.aggregation.sketch.hlld.sql;


+import com.alibaba.fastjson2.JSON;
 import com.fasterxml.jackson.databind.Module;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-import org.apache.calcite.schema.SchemaPlus;
-import org.apache.druid.java.util.common.io.Closer;
+import com.google.inject.Injector;
+import org.apache.druid.guice.DruidInjectorBuilder;
 import org.apache.druid.query.QueryRunnerFactoryConglomerate;
-import org.apache.druid.query.aggregation.CountAggregatorFactory;
-import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
-import org.apache.druid.query.aggregation.sketch.hlld.HllAggregatorFactory;
 import org.apache.druid.query.aggregation.sketch.hlld.HllModule;
-import org.apache.druid.segment.IndexBuilder;
 import org.apache.druid.segment.QueryableIndex;
 import org.apache.druid.segment.TestHelper;
-import org.apache.druid.segment.incremental.IncrementalIndexSchema;
-import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
-import org.apache.druid.server.QueryStackTests;
-import org.apache.druid.server.security.AuthTestUtils;
-import org.apache.druid.server.security.AuthenticationResult;
-import org.apache.druid.sql.SqlLifecycle;
-import org.apache.druid.sql.SqlLifecycleFactory;
-import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
-import org.apache.druid.sql.calcite.planner.PlannerConfig;
-import org.apache.druid.sql.calcite.planner.PlannerContext;
-import org.apache.druid.sql.calcite.planner.PlannerFactory;
-import org.apache.druid.sql.calcite.util.CalciteTestBase;
+import org.apache.druid.segment.join.JoinableFactoryWrapper;
+import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.QueryTestBuilder;
+import org.apache.druid.sql.calcite.QueryTestRunner;
 import org.apache.druid.sql.calcite.util.CalciteTests;
-import org.apache.druid.sql.calcite.util.QueryLogHook;
 import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.partition.LinearShardSpec;
 import org.junit.*;
-import org.junit.rules.TemporaryFolder;

+import java.io.File;
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
+import java.util.*;

-public class HllApproxCountDistinctSqlAggregatorTest extends CalciteTestBase {
-  private static final String DATA_SOURCE = "foo";
+// The parent class changed in the new version; the implementation is simpler now.
+public class HllApproxCountDistinctSqlAggregatorTest extends BaseCalciteQueryTest {
   private static final boolean ROUND = true;
-  private static final Map<String, Object> QUERY_CONTEXT_DEFAULT = ImmutableMap.of(
-      PlannerContext.CTX_SQL_QUERY_ID, "dummy"
-  );
-  private static QueryRunnerFactoryConglomerate conglomerate;
-  private static Closer resourceCloser;
-  private static AuthenticationResult authenticationResult = CalciteTests.REGULAR_USER_AUTH_RESULT;

-  @Rule
-  public TemporaryFolder temporaryFolder = new TemporaryFolder();
-
-  @Rule
-  public QueryLogHook queryLogHook = QueryLogHook.create(TestHelper.JSON_MAPPER);
-
-  private SpecificSegmentsQuerySegmentWalker walker;
-  private SqlLifecycleFactory sqlLifecycleFactory;
-
-  @BeforeClass
-  public static void setUpClass() {
-    resourceCloser = Closer.create();
-    conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(resourceCloser);
+  @Override
+  public void gatherProperties(Properties properties)
+  {
+    super.gatherProperties(properties);
   }

-  @AfterClass
-  public static void tearDownClass() throws IOException {
-    resourceCloser.close();
+  @Override
+  public void configureGuice(DruidInjectorBuilder builder)
+  {
+    super.configureGuice(builder);
+    builder.addModule(new HllModule());
   }

-  @Before
-  public void setUp() throws Exception {
+  @SuppressWarnings("resource")
+  @Override
+  public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+      final QueryRunnerFactoryConglomerate conglomerate,
+      final JoinableFactoryWrapper joinableFactory,
+      final Injector injector
+  ) throws IOException
+  {
     HllModule.registerSerde();
     for (Module mod : new HllModule().getJacksonModules()) {
       CalciteTests.getJsonMapper().registerModule(mod);
       TestHelper.JSON_MAPPER.registerModule(mod);
     }

-    final QueryableIndex index = IndexBuilder.create()
+    final QueryableIndex index = TestHelper.getTestIndexIO().loadIndex(new File("D:/doc/datas/testIndex-1369101812"));
+    //final QueryableIndex index = TestHelper.getTestIndexIO().loadIndex(new File("D:/doc/datas/9_index"));
+    /*final QueryableIndex index = IndexBuilder.create()
         .tmpDir(temporaryFolder.newFolder())
         .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
         .schema(
@@ -95,12 +76,12 @@
             .withRollup(false)
             .build()
     )
-        .rows(CalciteTests.ROWS1)
-        .buildMMappedIndex();
+        .rows(TestDataBuilder.ROWS1)
+        .buildMMappedIndex();*/

-    walker = new SpecificSegmentsQuerySegmentWalker(conglomerate).add(
+    return new SpecificSegmentsQuerySegmentWalker(conglomerate).add(
         DataSegment.builder()
-                   .dataSource(DATA_SOURCE)
+                   .dataSource(CalciteTests.DATASOURCE1)
                    .interval(index.getDataInterval())
                    .version("1")
                    .shardSpec(new LinearShardSpec(0))
@@ -108,45 +89,80 @@
                    .build(),
         index
     );
-
-    final PlannerConfig plannerConfig = new PlannerConfig();
-    final DruidOperatorTable operatorTable = new DruidOperatorTable(
-        ImmutableSet.of(
-            new HllApproxCountDistinctSqlAggregator(),
-            new HllObjectSqlAggregator()
-        ),
-        ImmutableSet.of(
-            new HllEstimateOperatorConversion()
-        )
-    );
-
-    SchemaPlus rootSchema = CalciteTests.createMockRootSchema(conglomerate, walker, plannerConfig, AuthTestUtils.TEST_AUTHORIZER_MAPPER);
-    sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(
-        new PlannerFactory(
-            rootSchema,
-            CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate),
-            operatorTable,
-            CalciteTests.createExprMacroTable(),
-            plannerConfig,
-            AuthTestUtils.TEST_AUTHORIZER_MAPPER,
-            CalciteTests.getJsonMapper(),
-            CalciteTests.DRUID_SCHEMA_NAME
-        )
-    );
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    walker.close();
-    walker = null;
   }

   @Test
   public void testSqlQuery() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
-    String sql = "select * from druid.foo";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    // Can't vectorize due to SUBSTRING expression.
+    cannotVectorize();
+
+    String[] columns = new String[]{"__time", "dim1", "dim2", "dim3", "cnt", "hll_dim1", "m1"};
+    String sql = "select " + String.join(",", columns) + " from druid.foo";
+    QueryTestBuilder builder = testBuilder().sql(sql);
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      Map row = new LinkedHashMap();
+      for (int i = 0; i < result.length; i++) {
+        row.put(columns[i], result[i]);
+      }
+      System.out.println(JSON.toJSONString(row));
+      // System.out.println(Arrays.toString(result));
+    }
+
+    for (int i = 0; i < columns.length; i++) {
+      Object[] values = new Object[results.size()];
+      for (int j = 0; j < results.size(); j++) {
+        values[j] = results.get(j)[i];
+      }
+      System.out.println(columns[i] + ":" + Arrays.toString(values));
+    }
+  }
+
+  @Test
+  public void testSqlQuery11() throws Exception {
+    // Can't vectorize due to SUBSTRING expression.
+    //cannotVectorize();
+
+    String sql = "select HLLD(hll_dim1) hll_dim1 from (select hll_dim1 from druid.foo limit 5) t ";
+    //sql = "select HLLD(hll_dim1) hll_dim1 from druid.foo t ";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testSqlQuery12() throws Exception {
+    // Can't vectorize due to SUBSTRING expression.
+    cannotVectorize();
+
+    String sql = "select * from (select * from druid.foo limit 6) t where __time >= '1970-12-15 07:00:28' and __time < '2023-12-15 08:10:28' ";
+    QueryTestBuilder builder = testBuilder().sql(sql);
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testSqlQuery1() throws Exception {
+    // Can't vectorize due to SUBSTRING expression.
+    cannotVectorize();
+
+    String sql = "select dim1 from druid.foo";
+    QueryTestBuilder builder = testBuilder().sql(sql);
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
@@ -154,37 +170,124 @@

   @Test
   public void testSqlQuery2() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
-    String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)) from druid.foo where dim1 = ''";
-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    //cannotVectorize();
+    //String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)) from druid.foo where dim1 = '1'";
+    // Caused by: org.apache.calcite.sql.validate.SqlValidatorException: Aggregate expressions cannot be nested
+    //String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)), APPROX_COUNT_DISTINCT_HLLD(HLLD(hll_dim1)), HLLD(hll_dim1) from druid.foo";
+    String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)), APPROX_COUNT_DISTINCT_HLLD(hll_dim1), HLLD(hll_dim1) from (select HLLD(hll_dim1) hll_dim1 from druid.foo) t";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
   }

   @Test
-  public void testAgg() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
+  public void testSqlQuery3() throws Exception {
+    //cannotVectorize();
+    //String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)) from druid.foo where dim1 = ''";
+    String sql = "select APPROX_COUNT_DISTINCT_HLLD(hll, 12) from (select HLLD(hll_dim1) hll from druid.foo where dim1 = '1') t ";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testSqlQuery4() throws Exception {
+    //cannotVectorize();
+    //String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)) from druid.foo where dim1 = ''";
+    String sql = "select APPROX_COUNT_DISTINCT_HLLD(hll, 12) from (select HLLD(hll_dim1) hll from druid.foo where dim1 = '1') t ";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testSqlQuery5() throws Exception {
+    //cannotVectorize();
+    //String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)) from druid.foo where dim1 = ''";
+    String sql = "select dim1,APPROX_COUNT_DISTINCT_HLLD(hll, 12) from (select dim1,HLLD(hll_dim1) hll from druid.foo where dim1 = '1' group by dim1) t group by dim1";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testSqlQuery6() throws Exception {
+    //cannotVectorize();
+    //String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)) from druid.foo where dim1 = ''";
+    String sql = "select dim1,APPROX_COUNT_DISTINCT_HLLD(hll, 12) from (select dim1,HLLD(dim1) hll from druid.foo where dim1 = '1' group by dim1 limit 10) t group by dim1";
+    //String sql = "select dim1,HLLD_ESTIMATE(HLLD(hll), false) from (select dim1,HLLD(dim1) hll from druid.foo where dim1 = '1' group by dim1 limit 10) t group by dim1";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testSqlQuery62() throws Exception {
+    //cannotVectorize();
+    //String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)) from druid.foo where dim1 = ''";
+    String sql = "select dim1,APPROX_COUNT_DISTINCT_HLLD(hll) from (select dim1,HLLD(dim1) hll from druid.foo where dim1 = '1' group by dim1 limit 10) t group by dim1";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testSqlQuery7() throws Exception {
+    //cannotVectorize();
+    //String sql = "select HLLD_ESTIMATE(HLLD(hll_dim1)) from druid.foo where dim1 = ''";
+    String sql = "select dim1,APPROX_COUNT_DISTINCT_HLLD(hll, 12) from (select dim1,HLLD(dim1) hll from druid.foo where dim1 = '1' group by dim1) t group by dim1 limit 10";
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+  }
+
+  @Test
+  public void testAgg() throws Exception {
     final String sql = "SELECT\n"
         + "  SUM(cnt),\n"
         + "  APPROX_COUNT_DISTINCT_HLLD(hll_dim1)\n"
         + "FROM druid.foo";

-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
   }

   @Test
   public void testDistinct() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();

     final String sql = "SELECT\n"
         + "  SUM(cnt),\n"
         + "  APPROX_COUNT_DISTINCT_HLLD(dim2),\n" // uppercase
@@ -195,18 +298,17 @@
         + "  APPROX_COUNT_DISTINCT_HLLD(hll_dim1)\n" // on native HllSketch column
         + "FROM druid.foo";

-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
   }

   @Test
   public void testDistinct2() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();

     final String sql = "SELECT\n"
         + "  SUM(cnt),\n"
         + "  APPROX_COUNT_DISTINCT_HLLD(dim2),\n"
@@ -219,8 +321,26 @@
         + "  APPROX_COUNT_DISTINCT_HLLD(hll_dim1)\n" // on native HllSketch column
         + "FROM druid.foo";

-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
+    for (Object[] result : results) {
+      System.out.println(Arrays.toString(result));
+    }
+
+  }
+
+  @Test
+  public void testDistinctDebug2() throws Exception {
+    final String sql = "SELECT\n"
+        + "  dim1, dim2\n"
+        + "FROM druid.foo";
+
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
@@ -229,15 +349,15 @@

   @Test
   public void testDistinctDebug() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();

     final String sql = "SELECT\n"
         + "  SUM(cnt),\n"
         + "  APPROX_COUNT_DISTINCT_HLLD(dim2)\n"
         + "FROM druid.foo";

-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
@@ -246,14 +366,14 @@

   @Test
   public void testDeser() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();

     final String sql = "SELECT\n"
         + "  APPROX_COUNT_DISTINCT_HLLD(hll_dim1) cnt\n"
         + "FROM druid.foo";

-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
@@ -263,30 +383,29 @@

   @Test
   public void testGroupBy() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();

     final String sql = "SELECT cnt,\n"
         + "  APPROX_COUNT_DISTINCT_HLLD(hll_dim1, 14) cnt2\n"
         + "FROM druid.foo group by cnt";

-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }

   }

   @Test
   public void testGroupBy1() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();

     final String sql = "SELECT __time,\n"
         + "  APPROX_COUNT_DISTINCT_HLLD(hll_dim1, 14) cnt\n"
         + "FROM druid.foo group by __time";

-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
@@ -295,14 +414,13 @@

   @Test
   public void testGroupBy2() throws Exception {
-    SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();

     final String sql = "SELECT __time,\n"
        + "  APPROX_COUNT_DISTINCT_HLLD(hll_dim1, 14) cnt\n"
        + "FROM druid.foo group by __time order by cnt desc";

-    final List<Object[]> results =
-        sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
+    QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
+    builder.run();
+    QueryTestRunner.QueryResults queryResults = builder.results();
+    List<Object[]> results = queryResults.results;
     for (Object[] result : results) {
       System.out.println(Arrays.toString(result));
     }
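The rewritten testSqlQuery above prints each result row as a name-keyed map and then dumps the same results column by column. That pivot logic is plain Java and easy to reuse outside the test harness; a minimal standalone version follows (the column names echo the test's, the data is made up):

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ResultPivot {
    public static void main(String[] args) {
        String[] columns = {"dim1", "cnt"};
        List<Object[]> results = List.of(new Object[]{"a", 1L}, new Object[]{"b", 2L});

        // Row view: one LinkedHashMap per row keeps column order stable.
        for (Object[] result : results) {
            Map<String, Object> row = new LinkedHashMap<>();
            for (int i = 0; i < result.length; i++) {
                row.put(columns[i], result[i]);
            }
            System.out.println(row); // {dim1=a, cnt=1}
        }

        // Column view: gather the i-th cell of every row.
        for (int i = 0; i < columns.length; i++) {
            Object[] values = new Object[results.size()];
            for (int j = 0; j < results.size(); j++) {
                values[j] = results.get(j)[i];
            }
            System.out.println(columns[i] + ":" + Arrays.toString(values));
        }
    }
}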
|||||||
143
druid-udf/pom.xml
Normal file
@@ -0,0 +1,143 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>druid-udf_26.0.0</artifactId>
    <name>druid-udf</name>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <druid.version>26.0.0</druid.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.druid</groupId>
            <artifactId>druid-server</artifactId>
            <version>${druid.version}</version>
            <scope>provided</scope>
        </dependency>

        <dependency>
            <groupId>org.apache.druid</groupId>
            <artifactId>druid-sql</artifactId>
            <version>${druid.version}</version>
            <scope>provided</scope>
        </dependency>

        <!-- Tests -->

        <dependency>
            <groupId>org.easymock</groupId>
            <artifactId>easymock</artifactId>
            <version>4.3</version>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.apache.druid</groupId>
            <artifactId>druid-processing</artifactId>
            <version>${druid.version}</version>
            <type>test-jar</type>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.druid</groupId>
            <artifactId>druid-server</artifactId>
            <version>${druid.version}</version>
            <scope>test</scope>
            <type>test-jar</type>
        </dependency>
        <dependency>
            <groupId>org.apache.druid</groupId>
            <artifactId>druid-sql</artifactId>
            <version>${druid.version}</version>
            <type>test-jar</type>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>com.alibaba.fastjson2</groupId>
            <artifactId>fastjson2</artifactId>
            <version>2.0.34</version>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.1</version>
                <configuration>
                    <compilerArgument>-Xlint:unchecked</compilerArgument>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <version>2.19.1</version>
                <configuration>
                    <argLine>-Duser.timezone=UTC</argLine>
                    <redirectTestOutputToFile>true</redirectTestOutputToFile>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>2.5.5</version>
                <executions>
                    <execution>
                        <id>distro-assembly</id>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                        <configuration>
                            <finalName>${project.artifactId}-${project.version}</finalName>
                            <tarLongFileMode>posix</tarLongFileMode>
                            <descriptors>
                                <descriptor>src/assembly/assembly.xml</descriptor>
                            </descriptors>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <artifactId>maven-release-plugin</artifactId>
                <version>2.5.3</version>
                <dependencies>
                    <dependency>
                        <groupId>org.apache.maven.scm</groupId>
                        <artifactId>maven-scm-provider-gitexe</artifactId>
                        <version>1.9.4</version>
                    </dependency>
                </dependencies>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <version>3.0.2</version>
                <configuration>
                    <archive>
                        <addMavenDescriptor>false</addMavenDescriptor>
                    </archive>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
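Both `druid-server` and `druid-sql` are `provided`-scope: at runtime the Druid installation supplies them on the classpath, so the assembly below (src/assembly/assembly.xml) ships only the extension jar and its non-provided dependencies. The project targets Java 11 and Druid 26.0.0, matching the `_26.0.0` artifactId suffix.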
54
druid-udf/src/assembly/assembly.xml
Normal file
@@ -0,0 +1,54 @@
<?xml version="1.0"?>
<!--
  ~ Copyright 2016 Imply Data, Inc.
  ~
  ~ Licensed under the Apache License, Version 2.0 (the "License");
  ~ you may not use this file except in compliance with the License.
  ~ You may obtain a copy of the License at
  ~
  ~     http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
  -->

<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.3"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.3 http://maven.apache.org/xsd/assembly-1.1.3.xsd">
  <id>bin</id>
  <formats>
    <format>tar.gz</format>
  </formats>

  <baseDirectory>${project.name}</baseDirectory>

  <dependencySets>
    <dependencySet>
      <useProjectArtifact>false</useProjectArtifact>
      <useTransitiveDependencies>true</useTransitiveDependencies>
      <outputDirectory>.</outputDirectory>
      <unpack>false</unpack>
    </dependencySet>
  </dependencySets>

  <fileSets>
    <fileSet>
      <directory>.</directory>
      <outputDirectory/>
      <includes>
        <include>README.md</include>
        <include>LICENSE</include>
      </includes>
    </fileSet>
    <fileSet>
      <directory>${project.build.directory}</directory>
      <outputDirectory>.</outputDirectory>
      <includes>
        <include>*.jar</include>
      </includes>
    </fileSet>
  </fileSets>
</assembly>
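With `<id>bin</id>` and the `finalName` configured in the pom, `mvn package` should produce a tarball along the lines of `target/druid-udf_26.0.0-1.0-SNAPSHOT-bin.tar.gz` (the assembly plugin appends the assembly id by default), containing the extension jar plus its runtime dependencies, ready to unpack into Druid's extensions directory.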
@@ -0,0 +1,27 @@
package org.apache.druid.query.udf;

import com.google.inject.Binder;
import org.apache.druid.guice.ExpressionModule;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.query.udf.expressions.CurrentTimestampMillisExprMacro;
import org.apache.druid.query.udf.expressions.DimensionBucketExprMacro;
import org.apache.druid.query.udf.sql.CurrentTimestampMillisOperatorConversion;
import org.apache.druid.query.udf.sql.DimensionBucketOperatorConversion;
import org.apache.druid.sql.guice.SqlBindings;

public class UdfModule implements DruidModule {
  @Override
  public void configure(Binder binder) {
    SqlBindings.addOperatorConversion(binder, DimensionBucketOperatorConversion.class);
    SqlBindings.addOperatorConversion(binder, CurrentTimestampMillisOperatorConversion.class);
    ExpressionModule.addExprMacro(binder, DimensionBucketExprMacro.class);
    ExpressionModule.addExprMacro(binder, CurrentTimestampMillisExprMacro.class);
  }

  /*@Override
  public List<? extends Module> getJacksonModules() {
    // Register Jackson module for any classes we need to be able to use in JSON queries or ingestion specs.
    return Collections.<Module>singletonList(new SimpleModule("UdfModule"));
  }*/

}
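The module wires each UDF up twice: `SqlBindings.addOperatorConversion` exposes it to the SQL planner, while `ExpressionModule.addExprMacro` registers the native expression macro that those conversions (and native queries or ingestion transforms) ultimately evaluate. Discovery of the module itself happens through the `META-INF/services` entry added later in this diff.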
@@ -0,0 +1,57 @@
package org.apache.druid.query.udf.expressions;

import org.apache.druid.math.expr.*;

import javax.annotation.Nullable;
import java.util.List;

public class CurrentTimestampMillisExprMacro implements ExprMacroTable.ExprMacro {
  private static final String NAME = "current_timestamp_millis";

  @Override
  public String name() {
    return NAME;
  }

  @Override
  public Expr apply(List<Expr> args) {
    validationHelperCheckArgumentCount(args, 0);

    class CurrentTimestampMillisExpr implements Expr {

      @Override
      public ExprEval eval(ObjectBinding bindings) {
        return ExprEval.of(System.currentTimeMillis());
      }

      @Override
      public String stringify() {
        return "current_timestamp_millis";
      }

      @Override
      public Expr visit(Shuttle shuttle) {
        return shuttle.visit(this);
      }

      @Override
      public BindingAnalysis analyzeInputs() {
        return BindingAnalysis.EMTPY;
      }

      @Nullable
      @Override
      public ExpressionType getOutputType(InputBindingInspector inspector) {
        return ExpressionType.LONG;
      }

      @Override
      public boolean canVectorize(InputBindingInspector inspector) {
        return false;
      }
    }

    return new CurrentTimestampMillisExpr();
  }

}
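Since `eval` reads `System.currentTimeMillis()` on every call, the expression is non-deterministic across rows and across evaluations, which is presumably why `canVectorize` returns false. A minimal sketch of driving the macro through the expression parser (the same APIs the tests later in this diff use; only druid-processing is needed):

```java
package org.apache.druid.query.udf.expressions;

import java.util.Collections;
import org.apache.druid.math.expr.*;

public class CurrentTimestampMillisDemo {
  public static void main(String[] args) throws Exception {
    // Register the macro, parse a call to it, then evaluate it twice.
    ExprMacroTable table =
        new ExprMacroTable(Collections.singletonList(new CurrentTimestampMillisExprMacro()));
    Expr expr = Parser.parse("current_timestamp_millis()", table);
    long first = expr.eval(InputBindings.nilBindings()).asLong();
    Thread.sleep(5);
    long second = expr.eval(InputBindings.nilBindings()).asLong();
    System.out.println(first + " -> " + second);  // second >= first: wall clock at eval time
  }
}
```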
@@ -0,0 +1,82 @@
package org.apache.druid.query.udf.expressions;

import org.apache.druid.math.expr.*;
import org.apache.druid.math.expr.ExprMacroTable.ExprMacro;

import javax.annotation.Nullable;
import java.util.List;
import java.util.stream.Collectors;

public class DimensionBucketExprMacro implements ExprMacro {
  private static final String NAME = "dimension_bucket";

  @Override
  public String name() {
    return NAME;
  }

  @Override
  public Expr apply(List<Expr> args) {
    validationHelperCheckMinArgumentCount(args, 2);
    Expr bucketCnt = args.get(0);
    if (!bucketCnt.isLiteral() || bucketCnt.eval(InputBindings.nilBindings()).asInt() <= 0) {
      throw validationFailed("the first argument (bucketCount) must be an int literal > 0");
    }
    return new DimensionBucketExpr(args);
  }

  static class DimensionBucketExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr {
    private final int bucketCount;

    public DimensionBucketExpr(List<Expr> args) {
      super(NAME, args);
      bucketCount = args.get(0).eval(InputBindings.nilBindings()).asInt();
    }

    @Override
    public ExprEval eval(ObjectBinding bindings) {
      int result = 1;
      for (int i = 1; i < args.size(); i++) {
        ExprEval eval = args.get(i).eval(bindings);
        Object element = eval.value();
        if (element instanceof Object[]) {
          for (Object ele : (Object[]) element) {
            result = 31 * result + (ele == null ? 0 : ele.hashCode());
          }
        } else {
          result = 31 * result + (element == null ? 0 : element.hashCode());
        }

        /*else if (element instanceof Number) {
          //result = 31 * result + Integer.hashCode(((Number)element).intValue());
          result = 31 * result + Long.hashCode(((Number)element).longValue());
        }*/
      }

      int bucket = Math.abs(result) % bucketCount;
      return ExprEval.of(IntToHexUtil.uInt16ToHexStringFast(bucket));
    }

    @Override
    public Expr visit(Shuttle shuttle) {
      List<Expr> newArgs = args.stream().map(x -> x.visit(shuttle)).collect(Collectors.toList());
      return shuttle.visit(new DimensionBucketExpr(newArgs));
    }

    @Override
    public BindingAnalysis analyzeInputs() {
      return super.analyzeInputs();
    }

    @Nullable
    @Override
    public ExpressionType getOutputType(InputBindingInspector inspector) {
      return ExpressionType.STRING;
    }

    @Override
    public boolean canVectorize(InputBindingInspector inspector) {
      return false;
    }
  }
}
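The bucket assignment is a Java-style polynomial hash: start from 1, fold in one `hashCode()` per argument value with multiplier 31 (array values are flattened element by element, nulls contribute 0), reduce modulo the bucket count, and render the bucket as four hex characters. A hand-computed sketch with hypothetical inputs — long values come out of `ExprEval.value()` boxed as `Long`, so they hash as `Long`:

```java
public class DimensionBucketHashDemo {
  public static void main(String[] args) {
    int bucketCount = 1024;
    int result = 1;
    // Same folding order as DimensionBucketExpr.eval, one value per dimension:
    result = 31 * result + "1".hashCode();                // device_id = "1"
    result = 31 * result + Long.valueOf(81L).hashCode();  // rule_id = 81
    result = 31 * result + 0;                             // a null dimension contributes 0
    int bucket = Math.abs(result) % bucketCount;          // bucket in [0, bucketCount)
    System.out.printf("bucket=%d hex=%04x%n", bucket, bucket);
  }
}
```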
@@ -0,0 +1,45 @@
package org.apache.druid.query.udf.expressions;

import java.nio.charset.StandardCharsets;

public class IntToHexUtil {
  // Base-36 digit table; only the first 16 entries are reachable here,
  // because the value is consumed one 4-bit nibble at a time.
  static final byte[] digits = {
      '0', '1', '2', '3', '4', '5',
      '6', '7', '8', '9', 'a', 'b',
      'c', 'd', 'e', 'f', 'g', 'h',
      'i', 'j', 'k', 'l', 'm', 'n',
      'o', 'p', 'q', 'r', 's', 't',
      'u', 'v', 'w', 'x', 'y', 'z'
  };
  static final String[] uInt16HexsCache;
  static final int uInt16HexsCacheSize = 8192;

  static {
    uInt16HexsCache = new String[uInt16HexsCacheSize];
    for (int i = 0; i < uInt16HexsCacheSize; i++) {
      uInt16HexsCache[i] = uInt16ToHexString(i);
    }
  }

  public static String uInt16ToHexStringFast(int i) {
    if (i < uInt16HexsCacheSize) {
      return uInt16HexsCache[i];
    } else {
      return uInt16ToHexString(i);
    }
  }

  private static String uInt16ToHexString(int i) {
    byte[] bytes = new byte[4];
    int mask = 15; // 16 - 1
    int value = i;
    bytes[3] = digits[value & mask];
    value >>>= 4;
    bytes[2] = digits[value & mask];
    value >>>= 4;
    bytes[1] = digits[value & mask];
    value >>>= 4;
    bytes[0] = digits[value & mask];
    return new String(bytes, StandardCharsets.US_ASCII);
  }
}
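`uInt16ToHexStringFast` precomputes the first 8192 strings so hot-path bucket formatting avoids allocating; values at or above the cache size fall back to the nibble-by-nibble encoder. Expected outputs, worked by hand from the encoding above:

```java
import org.apache.druid.query.udf.expressions.IntToHexUtil;

public class IntToHexUtilDemo {
  public static void main(String[] args) {
    System.out.println(IntToHexUtil.uInt16ToHexStringFast(255));     // "00ff" (cache hit)
    System.out.println(IntToHexUtil.uInt16ToHexStringFast(0x1234));  // "1234" (4660 < 8192, cached)
    System.out.println(IntToHexUtil.uInt16ToHexStringFast(0xabcd));  // "abcd" (43981 >= 8192, computed)
  }
}
```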
@@ -0,0 +1,35 @@
package org.apache.druid.query.udf.sql;

import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.*;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;

import javax.annotation.Nullable;

public class CurrentTimestampMillisOperatorConversion implements SqlOperatorConversion {
  private static final SqlFunction SQL_FUNCTION = OperatorConversions
      .operatorBuilder("CURRENT_TIMESTAMP_MILLIS")
      .operandTypes(SqlTypeFamily.ANY)
      .requiredOperands(0)
      .returnTypeNonNull(SqlTypeName.BIGINT)
      .functionCategory(SqlFunctionCategory.USER_DEFINED_FUNCTION)
      .build();

  @Override
  public SqlOperator calciteOperator() {
    return SQL_FUNCTION;
  }

  @Nullable
  @Override
  public DruidExpression toDruidExpression(PlannerContext plannerContext, RowSignature rowSignature, RexNode rexNode) {
    return OperatorConversions.convertDirectCall(plannerContext, rowSignature, rexNode, "current_timestamp_millis");
  }
}
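As declared, SQL validation would also accept one operand of any type (`operandTypes(SqlTypeFamily.ANY)` with `requiredOperands(0)` marks the lone operand optional), but the native macro it converts to rejects any arguments, so in practice only a bare `CURRENT_TIMESTAMP_MILLIS()` call will plan successfully.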
@@ -0,0 +1,43 @@
package org.apache.druid.query.udf.sql;

import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlOperandCountRanges;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;

import javax.annotation.Nullable;

public class DimensionBucketOperatorConversion implements SqlOperatorConversion {
  private static final SqlFunction SQL_FUNCTION = new SqlFunction(
      "DIMENSION_BUCKET",
      SqlKind.OTHER_FUNCTION,
      ReturnTypes.explicit(
          factory -> Calcites.createSqlTypeWithNullability(factory, SqlTypeName.VARCHAR, true)
      ),
      null,
      OperandTypes.variadic(SqlOperandCountRanges.from(2)),
      SqlFunctionCategory.USER_DEFINED_FUNCTION
  );

  @Override
  public SqlOperator calciteOperator() {
    return SQL_FUNCTION;
  }

  @Nullable
  @Override
  public DruidExpression toDruidExpression(PlannerContext plannerContext, RowSignature rowSignature, RexNode rexNode) {
    return OperatorConversions.convertDirectCall(plannerContext, rowSignature, rexNode, "dimension_bucket");
  }
}
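`OperandTypes.variadic(SqlOperandCountRanges.from(2))` admits any call with at least two arguments, mirroring the macro's own minimum-argument check, e.g. `DIMENSION_BUCKET(1024, device_id, rule_id)`; the conversion then hands the call straight through to the `dimension_bucket` native expression.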
@@ -0,0 +1 @@
org.apache.druid.query.udf.UdfModule
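This one-line file is a standard `java.util.ServiceLoader` registration; by convention it lives at `META-INF/services/org.apache.druid.initialization.DruidModule` inside the extension jar (the diff does not show the path). Druid's extension loader discovers modules this way; a quick classpath sanity check might look like:

```java
import java.util.ServiceLoader;
import org.apache.druid.initialization.DruidModule;

public class ServiceLoaderCheck {
  public static void main(String[] args) {
    // With the druid-udf jar (and its services file) on the classpath, this
    // should print org.apache.druid.query.udf.UdfModule among the modules.
    for (DruidModule module : ServiceLoader.load(DruidModule.class)) {
      System.out.println(module.getClass().getName());
    }
  }
}
```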
@@ -0,0 +1,148 @@
package org.apache.druid.query.udf.expressions;

import com.google.common.collect.ImmutableMap;
import org.apache.druid.math.expr.*;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.junit.Test;

import java.util.Collections;

public class CurrentTimestampMillisExprTest extends InitializedNullHandlingTest {
  private final ExprMacroTable exprMacroTable =
      new ExprMacroTable(Collections.singletonList(new CurrentTimestampMillisExprMacro()));

  Expr.ObjectBinding inputBindings = InputBindings.forInputSuppliers(
      new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
          .put("string", InputBindings.inputSupplier(ExpressionType.STRING, () -> "abcdef"))
          .put("long", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1234L))
          .put("double", InputBindings.inputSupplier(ExpressionType.DOUBLE, () -> 1.234))
          .put("array1", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> new Object[]{"1", "2", "3"}))
          .put("array2", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> new String[]{"1", "2", "3"}))
          .put("nullString", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
          .put("nullLong", InputBindings.inputSupplier(ExpressionType.LONG, () -> null))
          .put("nullDouble", InputBindings.inputSupplier(ExpressionType.DOUBLE, () -> null))
          .build()
  );

  Expr.ObjectBinding[] inputBindingArray = new Expr.ObjectBinding[]{
      InputBindings.forInputSuppliers(
          new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
              .put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
              .put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
              .put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
              .put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
              .put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1))
              .put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .build()
      ),
      InputBindings.forInputSuppliers(
          new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
              .put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
              .put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
              .put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
              .put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
              .put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1))
              .put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> new Object[]{"5","7","8"}))
              .put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .build()
      ),
      InputBindings.forInputSuppliers(
          new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
              .put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
              .put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
              .put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
              .put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
              .put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1L))
              .put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> "5.245.228.51"))
              .put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .build()
      ),
      // ...
      InputBindings.forInputSuppliers(
          new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
              .put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
              .put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
              .put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
              .put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
              .put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1))
              .put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .build()
      ),
      InputBindings.forInputSuppliers(
          new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
              .put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
              .put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
              .put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
              .put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
              .put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1))
              .put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> "5,7,8"))
              .put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .build()
      ),
      InputBindings.forInputSuppliers(
          new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
              .put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
              .put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
              .put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
              .put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
              .put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1L))
              .put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> "5.245.228.51"))
              .put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .build()
      ),
  };

  @Test
  public void test() throws Exception {
    Expr expr = Parser.parse("current_timestamp_millis()", exprMacroTable);
    System.out.println(expr.analyzeInputs().getRequiredBindings());
    ExprEval eval = expr.eval(inputBindings);
    System.out.println(eval.value());
    Thread.sleep(1000);
    eval = expr.eval(inputBindings);
    System.out.println(eval.value());
    Thread.sleep(1000);
    expr = Parser.parse("current_timestamp_millis()", exprMacroTable);
    eval = expr.eval(inputBindings);
    System.out.println(eval.value());
  }

}
@@ -0,0 +1,146 @@
package org.apache.druid.query.udf.expressions;

import com.google.common.collect.ImmutableMap;
import org.apache.druid.math.expr.*;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.junit.Test;

import java.util.Collections;

public class DimensionBucketExprTest extends InitializedNullHandlingTest {
  private final ExprMacroTable exprMacroTable =
      new ExprMacroTable(Collections.singletonList(new DimensionBucketExprMacro()));

  Expr.ObjectBinding inputBindings = InputBindings.forInputSuppliers(
      new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
          .put("string", InputBindings.inputSupplier(ExpressionType.STRING, () -> "abcdef"))
          .put("long", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1234L))
          .put("double", InputBindings.inputSupplier(ExpressionType.DOUBLE, () -> 1.234))
          .put("array1", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> new Object[]{"1", "2", "3"}))
          .put("array2", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> new String[]{"1", "2", "3"}))
          .put("nullString", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
          .put("nullLong", InputBindings.inputSupplier(ExpressionType.LONG, () -> null))
          .put("nullDouble", InputBindings.inputSupplier(ExpressionType.DOUBLE, () -> null))
          .build()
  );

  Expr.ObjectBinding[] inputBindingArray = new Expr.ObjectBinding[]{
      InputBindings.forInputSuppliers(
          new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
              .put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
              .put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
              .put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
              .put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
              .put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1))
              .put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .build()
      ),
      InputBindings.forInputSuppliers(
          new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
              .put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
              .put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
              .put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
              .put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
              .put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1))
              .put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> new Object[]{"5","7","8"}))
              .put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .build()
      ),
      InputBindings.forInputSuppliers(
          new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
              .put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
              .put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
              .put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
              .put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
              .put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1L))
              .put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
              .put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> "5.245.228.51"))
              .put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .build()
      ),
      // ...
      InputBindings.forInputSuppliers(
          new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
              .put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
              .put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
              .put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
              .put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
              .put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1))
              .put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .build()
      ),
      InputBindings.forInputSuppliers(
          new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
              .put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
              .put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
              .put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
              .put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
              .put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1))
              .put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> "5,7,8"))
              .put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .build()
      ),
      InputBindings.forInputSuppliers(
          new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
              .put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
              .put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
              .put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
              .put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
              .put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1L))
              .put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> "5.245.228.51"))
              .put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
              .build()
      ),
  };

  @Test
  public void test() {
    Expr expr = Parser.parse("dimension_bucket(1024, 100, 'aaa', string,long,double,array1, array2, nullString, nullLong)", exprMacroTable);
    ExprEval eval = expr.eval(inputBindings);
    System.out.println(eval.value());
  }

  @Test
  public void test2() {
    for (Expr.ObjectBinding objectBinding : inputBindingArray) {
      Expr expr = Parser.parse("dimension_bucket(1024, device_id, rule_id, template_id, chart_id, version, client_ip_object, server_ip_object, fqdn_category, client_ip, server_ip, server_fqdn, server_domain, application)", exprMacroTable);
      ExprEval eval = expr.eval(objectBinding);
      System.out.println(objectBinding.get("rule_id") + ", bucket_id:" + eval.value());
    }
  }
}