Merge branch 'develop' into 'main'

Develop

See merge request galaxy/platform/algorithm/druid-extensions!4
李奉超
2024-08-09 03:31:50 +00:00
23 changed files with 2472 additions and 1350 deletions

View File

@@ -8,9 +8,7 @@ package org.HdrHistogram; /**
import java.io.IOException;
import java.io.ObjectInputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.*;
import java.util.zip.DataFormatException;
/**
@@ -305,6 +303,35 @@ public class ArrayHistogram extends AbstractHistogram implements Histogramer{
return percentiles;
}
@Override
public Map<String, Object> describe() {
long min = getMinValue();
long max = getMaxValue(); // max = this.maxValue;
long count = getTotalCount();
double mean = getMean();
long sum = (long) (mean * count);
mean = Math.round(mean * 100.0) / 100.0;
long p25 = getValueAtPercentile(25);
long p50 = getValueAtPercentile(50);
long p75 = getValueAtPercentile(75);
long p90 = getValueAtPercentile(90);
long p95 = getValueAtPercentile(95);
long p99 = getValueAtPercentile(99);
Map<String, Object> rst = new LinkedHashMap<>();
rst.put("count", count);
rst.put("mean", mean);
rst.put("sum", sum);
rst.put("min", min);
rst.put("p25", p25);
rst.put("p50", p50);
rst.put("p75", p75);
rst.put("p90", p90);
rst.put("p95", p95);
rst.put("p99", p99);
rst.put("max", max);
return rst;
}
@Override
public Histogramer resetHistogram() {
if(isAutoResize()){

View File

@@ -2,7 +2,9 @@ package org.HdrHistogram;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class DirectArrayHistogram extends AbstractHistogram implements Histogramer{
long totalCount;
@@ -172,6 +174,35 @@ public class DirectArrayHistogram extends AbstractHistogram implements Histogram
return percentiles;
}
@Override
public Map<String, Object> describe() {
long min = getMinValue();
long max = getMaxValue(); // max = this.maxValue;
long count = getTotalCount();
double mean = getMean();
long sum = (long) (mean * count);
mean = Math.round(mean * 100.0) / 100.0;
long p25 = getValueAtPercentile(25);
long p50 = getValueAtPercentile(50);
long p75 = getValueAtPercentile(75);
long p90 = getValueAtPercentile(90);
long p95 = getValueAtPercentile(95);
long p99 = getValueAtPercentile(99);
Map<String, Object> rst = new LinkedHashMap<>();
rst.put("count", count);
rst.put("mean", mean);
rst.put("sum", sum);
rst.put("min", min);
rst.put("p25", p25);
rst.put("p50", p50);
rst.put("p75", p75);
rst.put("p90", p90);
rst.put("p95", p95);
rst.put("p99", p99);
rst.put("max", max);
return rst;
}
@Override
public Histogramer resetHistogram() {
throw new UnsupportedOperationException("unsupported method");

View File

@@ -3,6 +3,7 @@ package org.HdrHistogram;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.List;
import java.util.Map;
import java.util.zip.DataFormatException;
import java.util.zip.Inflater;
@@ -446,6 +447,11 @@ public class DirectMapHistogram implements Histogramer{
throw new UnsupportedOperationException("unsupported method");
}
@Override
public Map<String, Object> describe() {
throw new UnsupportedOperationException("unsupported method");
}
@Override
public Histogramer resetHistogram() {
throw new UnsupportedOperationException("unsupported method");

View File

@@ -2,6 +2,7 @@ package org.HdrHistogram;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
public class HistogramSketch {
public Histogramer hisImpl = null;
@@ -59,6 +60,10 @@ public class HistogramSketch {
return hisImpl.percentileList(percentileTicksPerHalfDistance);
}
public Map<String, Object> describe(){
return hisImpl.describe();
}
public static final int getUpdatableSerializationBytes(long lowestDiscernibleValue, long highestTrackableValue, int numberOfSignificantValueDigits){
return DirectArrayHistogram.getUpdatableSerializationBytes(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits);
}
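The map returned by describe() above is insertion-ordered (LinkedHashMap), so when the module serializes it to JSON the output takes roughly the following shape; the values here are illustrative only:
{"count":10000,"mean":4999.5,"sum":49995000,"min":0,"p25":2500,"p50":5000,"p75":7500,"p90":9000,"p95":9500,"p99":9900,"max":9999}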

View File

@@ -2,6 +2,7 @@ package org.HdrHistogram;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
public interface Histogramer {
long getTotalCount();
@@ -14,6 +15,8 @@ public interface Histogramer {
List<Percentile> percentileList(int percentileTicksPerHalfDistance);
Map<String, Object> describe();
Histogramer resetHistogram();
Histogramer merge(Histogramer histogram);

View File

@@ -38,4 +38,13 @@ public class Percentile {
public void setPercentile(double percentile) {
this.percentile = percentile;
}
@Override
public String toString() {
return "Percentile{" +
"value=" + value +
", count=" + count +
", percentile=" + percentile +
'}';
}
}

View File

@@ -21,7 +21,7 @@ public class HdrHistogramAggregatorFactory extends AggregatorFactory {
public static final long DEFAULT_HIGHEST = 2;
public static final int DEFAULT_SIGNIFICANT = 1;
public static final boolean DEFAULT_AUTO_RESIZE = true;
public static final long BUFFER_AUTO_RESIZE_HIGHEST = 100000000L * 1000000L;
public static final long BUFFER_AUTO_RESIZE_HIGHEST = 100000000L * 100L;
public static final Comparator<HistogramSketch> COMPARATOR =
Comparator.nullsFirst(Comparator.comparingLong(HistogramSketch::getTotalCount));

View File

@@ -37,7 +37,7 @@ public class HdrHistogramMergeBufferAggregator implements BufferAggregator {
this.numberOfSignificantValueDigits = numberOfSignificantValueDigits;
this.autoResize = autoResize;
this.size = size;
LOG.error("HdrHistogramMergeBufferAggregator gene:" + Thread.currentThread().getName() + "-" + Thread.currentThread().getId());
//LOG.error("HdrHistogramMergeBufferAggregator gene:" + Thread.currentThread().getName() + "-" + Thread.currentThread().getId());
}
@Override
@@ -83,7 +83,7 @@ public class HdrHistogramMergeBufferAggregator implements BufferAggregator {
@Nullable
@Override
public synchronized HistogramSketch get(ByteBuffer buf, int position) {
LOG.error("HdrHistogramMergeBufferAggregator get:" + 0 + "-" + Thread.currentThread().getId() + "-" + this);
//LOG.error("HdrHistogramMergeBufferAggregator get:" + 0 + "-" + Thread.currentThread().getId() + "-" + this);
HistogramUnion union = histograms.get(buf).get(position);
//return histogram.copy();
return union.getResult().copy();

View File

@@ -9,10 +9,7 @@ import com.google.common.annotations.VisibleForTesting;
import com.google.inject.Binder;
import org.HdrHistogram.HistogramSketch;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.HdrHistogramObjectSqlAggregator;
import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.HdrHistogramPercentilesOperatorConversion;
import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.HdrHistogramQuantileSqlAggregator;
import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.HdrHistogramQuantilesOperatorConversion;
import org.apache.druid.query.aggregation.sketch.HdrHistogram.sql.*;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.serde.ComplexMetrics;
import org.apache.druid.sql.guice.SqlBindings;
@@ -27,6 +24,8 @@ public class HdrHistogramModule implements DruidModule {
public static final byte QUANTILES_HDRHISTOGRAM_TO_QUANTILE_CACHE_TYPE_ID = 0x03;
public static final byte QUANTILES_HDRHISTOGRAM_TO_QUANTILES_CACHE_TYPE_ID = 0x04;
public static final byte QUANTILES_HDRHISTOGRAM_TO_PERCENTILES_CACHE_TYPE_ID = 0x05;
public static final byte QUANTILES_HDRHISTOGRAM_TO_DESCRIBE_CACHE_TYPE_ID = 0x06;
public static final byte QUANTILES_HDRHISTOGRAM_TO_PERCENTILES_DESCRIBE_CACHE_TYPE_ID = 0x07;
public static final String HDRHISTOGRAM_TYPE_NAME = "HdrHistogramSketch";
public static final ColumnType TYPE = ColumnType.ofComplex(HDRHISTOGRAM_TYPE_NAME);
@@ -50,6 +49,8 @@ public class HdrHistogramModule implements DruidModule {
SqlBindings.addOperatorConversion(binder, HdrHistogramQuantilesOperatorConversion.class);
SqlBindings.addOperatorConversion(binder, HdrHistogramPercentilesOperatorConversion.class);
SqlBindings.addOperatorConversion(binder, HdrHistogramDescribeOperatorConversion.class);
SqlBindings.addOperatorConversion(binder, HdrHistogramPercentilesDescribeOperatorConversion.class);
}
@Override
@@ -61,7 +62,9 @@ public class HdrHistogramModule implements DruidModule {
new NamedType(HdrHistogramMergeAggregatorFactory.class, "HdrHistogramSketchMerge"),
new NamedType(HdrHistogramToQuantilePostAggregator.class, "HdrHistogramSketchToQuantile"),
new NamedType(HdrHistogramToQuantilesPostAggregator.class, "HdrHistogramSketchToQuantiles"),
new NamedType(HdrHistogramToPercentilesPostAggregator.class, "HdrHistogramSketchToPercentiles")
new NamedType(HdrHistogramToPercentilesPostAggregator.class, "HdrHistogramSketchToPercentiles"),
new NamedType(HdrHistogramToDescribePostAggregator.class, "HdrHistogramSketchToDescribe"),
new NamedType(HdrHistogramToPercentilesDescribePostAggregator.class, "HdrHistogramSketchToPercentilesDescription")
).addSerializer(HistogramSketch.class, new HistogramJsonSerializer())
);
}

View File

@@ -0,0 +1,108 @@
package org.apache.druid.query.aggregation.sketch.HdrHistogram;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.Sets;
import org.HdrHistogram.HistogramSketch;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.segment.ColumnInspector;
import org.apache.druid.segment.column.ColumnType;
import javax.annotation.Nullable;
import java.util.*;
public class HdrHistogramToDescribePostAggregator implements PostAggregator {
private final String name;
private final String fieldName;
@JsonCreator
public HdrHistogramToDescribePostAggregator(
@JsonProperty("name") String name,
@JsonProperty("fieldName") String fieldName
){
this.name = name;
this.fieldName = fieldName;
}
@Override
public ColumnType getType(ColumnInspector signature){
return ColumnType.STRING;
}
@Override
@JsonProperty
public String getName() {
return name;
}
@JsonProperty
public String getFieldName() {
return fieldName;
}
@Nullable
@Override
public Object compute(Map<String, Object> values) {
HistogramSketch histogram = (HistogramSketch) values.get(fieldName);
if(histogram == null){
return "{}"; //"[]"
}
return HdrHistogramModule.toJson(histogram.describe());
}
@Override
public Comparator<double[]> getComparator()
{
throw new IAE("Comparing arrays of quantiles is not supported");
}
@Override
public Set<String> getDependentFields()
{
return Sets.newHashSet(fieldName);
}
@Override
public PostAggregator decorate(Map<String, AggregatorFactory> aggregators) {
return this;
}
@Override
public byte[] getCacheKey() {
CacheKeyBuilder builder = new CacheKeyBuilder(HdrHistogramModule.CACHE_TYPE_ID_OFFSET).appendByte(HdrHistogramModule.QUANTILES_HDRHISTOGRAM_TO_DESCRIBE_CACHE_TYPE_ID)
.appendString(fieldName);
return builder.build();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
HdrHistogramToDescribePostAggregator that = (HdrHistogramToDescribePostAggregator) o;
return name.equals(that.name) &&
fieldName.equals(that.fieldName);
}
@Override
public int hashCode() {
return Objects.hash(name, fieldName);
}
@Override
public String toString() {
return "HdrHistogramToDescribePostAggregator{" +
"name='" + name + '\'' +
", fieldName='" + fieldName + '\'' +
'}';
}
}
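A minimal sketch of how this post-aggregator could be referenced in a native query, using the "HdrHistogramSketchToDescribe" type name registered in HdrHistogramModule; the name and fieldName values are illustrative. The computed value is the describe() map serialized as a JSON string.
{
  "type": "HdrHistogramSketchToDescribe",
  "name": "m1_describe",
  "fieldName": "m1_sketch"
}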

View File

@@ -0,0 +1,125 @@
package org.apache.druid.query.aggregation.sketch.HdrHistogram;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.Sets;
import org.HdrHistogram.HistogramSketch;
import org.HdrHistogram.Percentile;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.segment.ColumnInspector;
import org.apache.druid.segment.column.ColumnType;
import javax.annotation.Nullable;
import java.util.*;
public class HdrHistogramToPercentilesDescribePostAggregator implements PostAggregator {
private final String name;
private final String fieldName;
private final int percentileTicksPerHalfDistance;
@JsonCreator
public HdrHistogramToPercentilesDescribePostAggregator(
@JsonProperty("name") String name,
@JsonProperty("fieldName") String fieldName,
@JsonProperty("percentileTicksPerHalfDistance") int percentileTicksPerHalfDistance
){
this.name = name;
this.fieldName = fieldName;
this.percentileTicksPerHalfDistance = percentileTicksPerHalfDistance;
}
@Override
public ColumnType getType(ColumnInspector signature){
return ColumnType.STRING;
}
@Override
@JsonProperty
public String getName() {
return name;
}
@JsonProperty
public String getFieldName() {
return fieldName;
}
@JsonProperty
public int getPercentileTicksPerHalfDistance() {
return percentileTicksPerHalfDistance;
}
@Nullable
@Override
public Object compute(Map<String, Object> values) {
HistogramSketch histogram = (HistogramSketch) values.get(fieldName);
if(histogram == null){
return "{\"percentiles\":[],\"describe\":{}}";
}
List<Percentile> percentiles = histogram.percentileList(percentileTicksPerHalfDistance);
Map<String, Object> describe = histogram.describe();
Map<String, Object> rst = new LinkedHashMap<>();
rst.put("percentiles", percentiles);
rst.put("description", describe);
return HdrHistogramModule.toJson(rst);
}
@Override
public Comparator<double[]> getComparator()
{
throw new IAE("Comparing object is not supported");
}
@Override
public Set<String> getDependentFields()
{
return Sets.newHashSet(fieldName);
}
@Override
public PostAggregator decorate(Map<String, AggregatorFactory> aggregators) {
return this;
}
@Override
public byte[] getCacheKey() {
CacheKeyBuilder builder = new CacheKeyBuilder(HdrHistogramModule.CACHE_TYPE_ID_OFFSET).appendByte(HdrHistogramModule.QUANTILES_HDRHISTOGRAM_TO_PERCENTILES_DESCRIBE_CACHE_TYPE_ID)
.appendString(fieldName);
builder.appendInt(percentileTicksPerHalfDistance);
return builder.build();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
HdrHistogramToPercentilesDescribePostAggregator that = (HdrHistogramToPercentilesDescribePostAggregator) o;
return percentileTicksPerHalfDistance == that.percentileTicksPerHalfDistance &&
name.equals(that.name) &&
fieldName.equals(that.fieldName);
}
@Override
public int hashCode() {
return Objects.hash(name, fieldName, percentileTicksPerHalfDistance);
}
@Override
public String toString() {
return "HdrHistogramToPercentilesDescribePostAggregator{" +
"name='" + name + '\'' +
", fieldName='" + fieldName + '\'' +
", probabilitys=" + percentileTicksPerHalfDistance +
'}';
}
}
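A corresponding sketch for this post-aggregator, using the "HdrHistogramSketchToPercentilesDescription" type name from the module; field values are illustrative. Its computed result is a JSON string of the form {"percentiles":[...],"description":{...}}, as built in compute() above.
{
  "type": "HdrHistogramSketchToPercentilesDescription",
  "name": "m1_percentiles_describe",
  "fieldName": "m1_sketch",
  "percentileTicksPerHalfDistance": 5
}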

View File

@@ -0,0 +1,77 @@
package org.apache.druid.query.aggregation.sketch.HdrHistogram.sql;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramToDescribePostAggregator;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.PostAggregatorVisitor;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import javax.annotation.Nullable;
import java.util.List;
public class HdrHistogramDescribeOperatorConversion implements SqlOperatorConversion {
private static final String FUNCTION_NAME = "HDR_DESCRIBE";
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
.operandTypes(SqlTypeFamily.ANY)
.requiredOperands(1)
.returnTypeInference(ReturnTypes.explicit(SqlTypeName.VARCHAR))
.build();
@Override
public SqlOperator calciteOperator()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
PlannerContext plannerContext,
RowSignature rowSignature,
RexNode rexNode
)
{
return null;
}
@Nullable
@Override
public PostAggregator toPostAggregator(
PlannerContext plannerContext,
RowSignature rowSignature,
RexNode rexNode,
PostAggregatorVisitor postAggregatorVisitor
)
{
final List<RexNode> operands = ((RexCall) rexNode).getOperands();
final PostAggregator postAgg = OperatorConversions.toPostAggregator(
plannerContext,
rowSignature,
operands.get(0),
postAggregatorVisitor,
true
);
if (postAgg == null) {
return null;
}
return new HdrHistogramToDescribePostAggregator(
postAggregatorVisitor.getOutputNamePrefix() + postAggregatorVisitor.getAndIncrementCounter(),
((FieldAccessPostAggregator)postAgg).getFieldName()
);
}
}
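Illustrative SQL usage of the operator defined above (the same pattern appears in HdrHistogramQuantileSqlAggregatorTest below); the histogram bounds are example values:
SELECT HDR_DESCRIBE(HDR_HISTOGRAM(m1, 1, 100, 2)) AS m1_describe FROM druid.foo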

View File

@@ -0,0 +1,88 @@
package org.apache.druid.query.aggregation.sketch.HdrHistogram.sql;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.query.aggregation.sketch.HdrHistogram.HdrHistogramToPercentilesDescribePostAggregator;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.PostAggregatorVisitor;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import javax.annotation.Nullable;
import java.util.List;
public class HdrHistogramPercentilesDescribeOperatorConversion implements SqlOperatorConversion {
private static final String FUNCTION_NAME = "HDR_GET_PERCENTILES_DESCRIPTION";
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
.operandTypes(SqlTypeFamily.ANY, SqlTypeFamily.NUMERIC)
.requiredOperands(1)
.returnTypeInference(ReturnTypes.explicit(SqlTypeName.VARCHAR))
.build();
@Override
public SqlOperator calciteOperator()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
PlannerContext plannerContext,
RowSignature rowSignature,
RexNode rexNode
)
{
return null;
}
@Nullable
@Override
public PostAggregator toPostAggregator(
PlannerContext plannerContext,
RowSignature rowSignature,
RexNode rexNode,
PostAggregatorVisitor postAggregatorVisitor
)
{
final List<RexNode> operands = ((RexCall) rexNode).getOperands();
final PostAggregator postAgg = OperatorConversions.toPostAggregator(
plannerContext,
rowSignature,
operands.get(0),
postAggregatorVisitor,
true
);
if (postAgg == null) {
return null;
}
int percentileTicksPerHalfDistance = 5;
if (operands.size() == 2) {
if (!operands.get(1).isA(SqlKind.LITERAL)) {
return null;
}
percentileTicksPerHalfDistance = RexLiteral.intValue(operands.get(1));
}
return new HdrHistogramToPercentilesDescribePostAggregator(
postAggregatorVisitor.getOutputNamePrefix() + postAggregatorVisitor.getAndIncrementCounter(),
((FieldAccessPostAggregator)postAgg).getFieldName(),
percentileTicksPerHalfDistance
);
}
}
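Illustrative SQL usage; the second operand is optional and, as coded above, defaults to 5 ticks per half distance when omitted:
SELECT HDR_GET_PERCENTILES_DESCRIPTION(HDR_HISTOGRAM(m1, 1, 100, 2), 5) AS m1_pd FROM druid.foo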

View File

@@ -0,0 +1,79 @@
package org.apache.druid.query.aggregation.sketch.HdrHistogram;
import org.HdrHistogram.DirectArrayHistogram;
import org.HdrHistogram.HistogramSketch;
import org.HdrHistogram.Histogramer;
import org.HdrHistogram.Percentile;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;
import java.io.BufferedWriter;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
public class HistogramSketchTest {
@Test
public void describeTest() throws Exception{
DirectArrayHistogram histogram = new DirectArrayHistogram(1, 1000000, 3,
ByteBuffer.allocate(HistogramSketch.getUpdatableSerializationBytes(1, 1000000, 3)));
System.out.println(histogram.describe());
for (int i = 0; i < 10000; i++) {
histogram.recordValue(i);
}
System.out.println(histogram.describe());
for (Percentile percentile : histogram.percentileList(100)) {
System.out.println(percentile);
}
}
@Test
public void describeTest1() throws Exception{
HistogramSketch histogram = new HistogramSketch(1);
System.out.println(histogram.describe());
for (int i = 0; i < 10000; i++) {
histogram.recordValue(i);
}
System.out.println(histogram.describe());
for (Percentile percentile : histogram.percentileList(100)) {
System.out.println(percentile);
}
System.out.println(StringUtils.repeat('#', 100));
histogram = new HistogramSketch(1);
for (int i = 0; i < 10000; i++) {
histogram.recordValue(ThreadLocalRandom.current().nextLong(100000));
}
System.out.println(histogram.describe());
for (Percentile percentile : histogram.percentileList(100)) {
System.out.println(percentile);
}
}
@Test
public void describeTest3() throws Exception{
HistogramSketch histogram = new HistogramSketch(3);
System.out.println(histogram.describe());
for (int i = 0; i < 10000; i++) {
histogram.recordValue(i);
}
System.out.println(histogram.describe());
for (Percentile percentile : histogram.percentileList(100)) {
System.out.println(percentile);
}
System.out.println(StringUtils.repeat('#', 100));
histogram = new HistogramSketch(3);
for (int i = 0; i < 10000; i++) {
histogram.recordValue(ThreadLocalRandom.current().nextLong(100000));
}
System.out.println(histogram.describe());
for (Percentile percentile : histogram.percentileList(100)) {
System.out.println(percentile);
}
}
}

View File

@@ -219,6 +219,30 @@ public class HdrHistogramQuantileSqlAggregatorTest extends BaseCalciteQueryTest
}
}
@Test
public void testSqlDESCRIBE() throws Exception {
String sql = "select HDR_GET_QUANTILES(HDR_HISTOGRAM(m1, 1, 100, 2), 0, 0.25, 0.5, 0.75, 1) a, HDR_DESCRIBE(HDR_HISTOGRAM(m1, 1, 100, 2)) b, HDR_DESCRIBE(HDR_HISTOGRAM(hist_m1, 1, 100, 2)) c from druid.foo";
QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
builder.run();
QueryTestRunner.QueryResults queryResults = builder.results();
List<Object[]> results = queryResults.results;
for (Object[] result : results) {
System.out.println(Arrays.toString(result));
}
}
@Test
public void testSqlDESCRIBE2() throws Exception {
String sql = "select HDR_GET_QUANTILES(HDR_HISTOGRAM(m1, 1, 100, 2), 0, 0.25, 0.5, 0.75, 1) a, HDR_GET_PERCENTILES_DESCRIPTION(HDR_HISTOGRAM(m1, 1, 100, 2)) b, HDR_GET_PERCENTILES_DESCRIPTION(HDR_HISTOGRAM(hist_m1, 1, 100, 2)) c from druid.foo";
QueryTestBuilder builder = testBuilder().sql(sql).skipVectorize();
builder.run();
QueryTestRunner.QueryResults queryResults = builder.results();
List<Object[]> results = queryResults.results;
for (Object[] result : results) {
System.out.println(Arrays.toString(result));
}
}
@Test
public void testSqlQuery() throws Exception {
String[] columns = new String[]{"__time", "dim1", "dim2", "dim3", "cnt", "hist_m1", "m1"};

druid-udf/pom.xml (new file, 143 lines added)
View File

@@ -0,0 +1,143 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.example</groupId>
<artifactId>druid-udf_26.0.0</artifactId>
<name>druid-udf</name>
<version>1.0-SNAPSHOT</version>
<properties>
<maven.compiler.source>11</maven.compiler.source>
<maven.compiler.target>11</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<druid.version>26.0.0</druid.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-server</artifactId>
<version>${druid.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-sql</artifactId>
<version>${druid.version}</version>
<scope>provided</scope>
</dependency>
<!-- Tests -->
<dependency>
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
<version>4.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-processing</artifactId>
<version>${druid.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-server</artifactId>
<version>${druid.version}</version>
<scope>test</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-sql</artifactId>
<version>${druid.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.alibaba.fastjson2</groupId>
<artifactId>fastjson2</artifactId>
<version>2.0.34</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<compilerArgument>-Xlint:unchecked</compilerArgument>
<source>11</source>
<target>11</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.19.1</version>
<configuration>
<argLine>-Duser.timezone=UTC</argLine>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.5.5</version>
<executions>
<execution>
<id>distro-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<finalName>${project.artifactId}-${project.version}</finalName>
<tarLongFileMode>posix</tarLongFileMode>
<descriptors>
<descriptor>src/assembly/assembly.xml</descriptor>
</descriptors>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-release-plugin</artifactId>
<version>2.5.3</version>
<dependencies>
<dependency>
<groupId>org.apache.maven.scm</groupId>
<artifactId>maven-scm-provider-gitexe</artifactId>
<version>1.9.4</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.0.2</version>
<configuration>
<archive>
<addMavenDescriptor>false</addMavenDescriptor>
</archive>
</configuration>
</plugin>
</plugins>
</build>
</project>
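Assuming a standard Maven setup, the module is built from the druid-udf/ directory with:
mvn clean package
which runs the assembly execution bound to the package phase above and leaves a distributable tar.gz under target/.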

View File

@@ -0,0 +1,54 @@
<?xml version="1.0"?>
<!--
~ Copyright 2016 Imply Data, Inc.
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.3"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.3 http://maven.apache.org/xsd/assembly-1.1.3.xsd">
<id>bin</id>
<formats>
<format>tar.gz</format>
</formats>
<baseDirectory>${project.name}</baseDirectory>
<dependencySets>
<dependencySet>
<useProjectArtifact>false</useProjectArtifact>
<useTransitiveDependencies>true</useTransitiveDependencies>
<outputDirectory>.</outputDirectory>
<unpack>false</unpack>
</dependencySet>
</dependencySets>
<fileSets>
<fileSet>
<directory>.</directory>
<outputDirectory/>
<includes>
<include>README.md</include>
<include>LICENSE</include>
</includes>
</fileSet>
<fileSet>
<directory>${project.build.directory}</directory>
<outputDirectory>.</outputDirectory>
<includes>
<include>*.jar</include>
</includes>
</fileSet>
</fileSets>
</assembly>

View File

@@ -0,0 +1,23 @@
package org.apache.druid.query.udf;
import com.google.inject.Binder;
import org.apache.druid.guice.ExpressionModule;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.query.udf.expressions.DimensionBucketExprMacro;
import org.apache.druid.query.udf.sql.DimensionBucketOperatorConversion;
import org.apache.druid.sql.guice.SqlBindings;
public class UdfModule implements DruidModule {
@Override
public void configure(Binder binder) {
SqlBindings.addOperatorConversion(binder, DimensionBucketOperatorConversion.class);
ExpressionModule.addExprMacro(binder, DimensionBucketExprMacro.class);
}
/*@Override
public List<? extends Module> getJacksonModules() {
// Register Jackson module for any classes we need to be able to use in JSON queries or ingestion specs.
return Collections.<Module>singletonList(new SimpleModule("UdfModule"));
}*/
}
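The module is discovered through the Java ServiceLoader entry added below (the META-INF/services file listing org.apache.druid.query.udf.UdfModule). A hedged sketch of enabling it on a Druid node, assuming the jar is installed under an extensions directory named druid-udf:
druid.extensions.loadList=["druid-udf"]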

View File

@@ -0,0 +1,82 @@
package org.apache.druid.query.udf.expressions;
import org.apache.druid.math.expr.*;
import org.apache.druid.math.expr.ExprMacroTable.ExprMacro;
import javax.annotation.Nullable;
import java.util.List;
import java.util.stream.Collectors;
public class DimensionBucketExprMacro implements ExprMacro {
private static final String NAME = "dimension_bucket";
@Override
public String name() {
return NAME;
}
@Override
public Expr apply(List<Expr> args) {
validationHelperCheckMinArgumentCount(args, 2);
Expr bucketCnt = args.get(0);
if(!bucketCnt.isLiteral()|| bucketCnt.eval(InputBindings.nilBindings()).asInt() <= 0) {
throw validationFailed("first argument (bucketCount) must be an int literal greater than 0");
}
return new DimensionBucketExpr(args);
}
static class DimensionBucketExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr {
private final int bucketCount;
public DimensionBucketExpr(List<Expr> args) {
super(NAME, args);
bucketCount = args.get(0).eval(InputBindings.nilBindings()).asInt();
}
@Override
public ExprEval eval(ObjectBinding bindings) {
int result = 1;
for (int i = 1; i < args.size(); i++) {
ExprEval eval = args.get(i).eval(bindings);
Object element = eval.value();
if(element instanceof Object[]){
for (Object ele : (Object[]) element) {
result = 31 * result + (ele == null ? 0 : ele.hashCode());
}
}else{
result = 31 * result + (element == null ? 0 : element.hashCode());
}
/*else if (element instanceof Number) {
//result = 31 * result + Integer.hashCode(((Number)element).intValue());
result = 31 * result + Long.hashCode(((Number)element).longValue());
}*/
}
int bucket = Math.abs(result) % bucketCount;
return ExprEval.of(IntToHexUtil.uInt16ToHexStringFast(bucket));
}
@Override
public Expr visit(Shuttle shuttle) {
List<Expr> newArgs = args.stream().map(x -> x.visit(shuttle)).collect(Collectors.toList());
return shuttle.visit(new DimensionBucketExpr(newArgs));
}
@Override
public BindingAnalysis analyzeInputs() {
return super.analyzeInputs();
}
@Nullable
@Override
public ExpressionType getOutputType(InputBindingInspector inspector) {
return ExpressionType.STRING;
}
@Override
public boolean canVectorize(InputBindingInspector inspector) {
return false;
}
}
}
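A minimal usage sketch of the expression macro, mirroring the tests below; column names and bindings are illustrative:
Expr expr = Parser.parse("dimension_bucket(1024, device_id, rule_id)", exprMacroTable);
ExprEval eval = expr.eval(bindings);   // bindings must supply device_id and rule_id
Object bucketId = eval.value();        // a 4-character hex id in the range "0000".."03ff"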

View File

@@ -0,0 +1,45 @@
package org.apache.druid.query.udf.expressions;
import java.nio.charset.StandardCharsets;
public class IntToHexUtil {
static final byte[] digits = {
'0' , '1' , '2' , '3' , '4' , '5' ,
'6' , '7' , '8' , '9' , 'a' , 'b' ,
'c' , 'd' , 'e' , 'f' , 'g' , 'h' ,
'i' , 'j' , 'k' , 'l' , 'm' , 'n' ,
'o' , 'p' , 'q' , 'r' , 's' , 't' ,
'u' , 'v' , 'w' , 'x' , 'y' , 'z'
};
static final String[] uInt16HexsCache;
static final int uInt16HexsCacheSize = 8192;
static{
uInt16HexsCache = new String[uInt16HexsCacheSize];
for (int i = 0; i < uInt16HexsCacheSize; i++) {
uInt16HexsCache[i] = uInt16ToHexString(i);
}
}
public static String uInt16ToHexStringFast(int i){
if(i < uInt16HexsCacheSize){
return uInt16HexsCache[i];
}else{
return uInt16ToHexString(i);
}
}
private static String uInt16ToHexString(int i){
byte[] bytes = new byte[4];
int mask = 15; // 16 - 1
int value = i;
bytes[3] = digits[value & mask];
value >>>= 4;
bytes[2] = digits[value & mask];
value >>>= 4;
bytes[1] = digits[value & mask];
value >>>= 4;
bytes[0] = digits[value & mask];
return new String(bytes, StandardCharsets.US_ASCII);
}
}
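For example, with the cache initialized as above, uInt16ToHexStringFast(81) returns "0051" and uInt16ToHexStringFast(1023) returns "03ff"; values below 8192 come straight from the precomputed cache, while larger ones fall back to uInt16ToHexString.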

View File

@@ -0,0 +1,43 @@
package org.apache.druid.query.udf.sql;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlOperandCountRanges;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import javax.annotation.Nullable;
public class DimensionBucketOperatorConversion implements SqlOperatorConversion {
private static final SqlFunction SQL_FUNCTION = new SqlFunction(
"DIMENSION_BUCKET",
SqlKind.OTHER_FUNCTION,
ReturnTypes.explicit(
factory -> Calcites.createSqlTypeWithNullability(factory, SqlTypeName.VARCHAR, true)
),
null,
OperandTypes.variadic(SqlOperandCountRanges.from(2)),
SqlFunctionCategory.USER_DEFINED_FUNCTION
);
@Override
public SqlOperator calciteOperator() {
return SQL_FUNCTION;
}
@Nullable
@Override
public DruidExpression toDruidExpression(PlannerContext plannerContext, RowSignature rowSignature, RexNode rexNode) {
return OperatorConversions.convertDirectCall(plannerContext, rowSignature, rexNode, "dimension_bucket");
}
}
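Illustrative SQL usage of the conversion above, assuming the UDF extension is loaded; the first argument is the bucket count and at least one dimension must follow:
SELECT DIMENSION_BUCKET(1024, device_id, rule_id) AS bucket_id FROM druid.foo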

View File

@@ -0,0 +1 @@
org.apache.druid.query.udf.UdfModule

View File

@@ -0,0 +1,146 @@
package org.apache.druid.query.udf.expressions;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.math.expr.*;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.junit.Test;
import java.util.Collections;
public class DimensionBucketExprTest extends InitializedNullHandlingTest {
private final ExprMacroTable exprMacroTable = new ExprMacroTable(Collections.singletonList(new DimensionBucketExprMacro()));
Expr.ObjectBinding inputBindings = InputBindings.forInputSuppliers(
new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
.put("string", InputBindings.inputSupplier(ExpressionType.STRING, () -> "abcdef"))
.put("long", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1234L))
.put("double", InputBindings.inputSupplier(ExpressionType.DOUBLE, () -> 1.234))
.put("array1", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> new Object[]{"1", "2", "3"}))
.put("array2", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> new String[]{"1", "2", "3"}))
.put("nullString", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("nullLong", InputBindings.inputSupplier(ExpressionType.LONG, () -> null))
.put("nullDouble", InputBindings.inputSupplier(ExpressionType.DOUBLE, () -> null))
.build()
);
Expr.ObjectBinding[] inputBindingArray = new Expr.ObjectBinding[]{
InputBindings.forInputSuppliers(
new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
.put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
.put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
.put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
.put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
.put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1))
.put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
.put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
.put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
.put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.build()
),
InputBindings.forInputSuppliers(
new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
.put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
.put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
.put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
.put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
.put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1))
.put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> new Object[]{"5","7","8"}))
.put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
.put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
.put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.build()
),
InputBindings.forInputSuppliers(
new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
.put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
.put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
.put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
.put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
.put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1L))
.put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
.put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
.put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING_ARRAY, () -> null))
.put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> "5.245.228.51"))
.put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.build()
),
// ...
InputBindings.forInputSuppliers(
new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
.put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
.put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
.put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
.put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 81))
.put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1))
.put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.build()
),
InputBindings.forInputSuppliers(
new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
.put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
.put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
.put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
.put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 101))
.put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1))
.put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> "5,7,8"))
.put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.build()
),
InputBindings.forInputSuppliers(
new ImmutableMap.Builder<String, InputBindings.InputSupplier>()
.put("device_id", InputBindings.inputSupplier(ExpressionType.STRING, () -> "1"))
.put("rule_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
.put("template_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
.put("chart_id", InputBindings.inputSupplier(ExpressionType.LONG, () -> 271L))
.put("version", InputBindings.inputSupplier(ExpressionType.LONG, () -> 1L))
.put("client_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_ip_object", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("fqdn_category", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("client_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_ip", InputBindings.inputSupplier(ExpressionType.STRING, () -> "5.245.228.51"))
.put("server_fqdn", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("server_domain", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.put("application", InputBindings.inputSupplier(ExpressionType.STRING, () -> null))
.build()
),
};
@Test
public void test() {
Expr expr = Parser.parse("dimension_bucket(1024, 100, 'aaa', string,long,double,array1, array2, nullString, nullLong)", exprMacroTable);
ExprEval eval = expr.eval(inputBindings);
System.out.println(eval.value());
}
@Test
public void test2() {
for (Expr.ObjectBinding objectBinding : inputBindingArray) {
Expr expr = Parser.parse("dimension_bucket(1024, device_id, rule_id, template_id, chart_id, version, client_ip_object, server_ip_object, fqdn_category, client_ip, server_ip, server_fqdn, server_domain, application)", exprMacroTable);
ExprEval eval = expr.eval(objectBinding);
System.out.println(objectBinding.get("rule_id") + ", bucket_id:" + eval.value());
}
}
}