remove legacy SQL incompatible modes (#17609)
* remove legacy SQL incompatible modes

changes:
* druid.generic.useDefaultValueForNull is no longer wired up to anything and will log.warn if set to true
* druid.generic.useThreeValueLogicForNativeFilters is no longer wired up to anything and will log.warn if set to false (a sketch of this warn-only behavior appears below, after the change summary)
* clean up and simplify all code and tests related to null handling configs

* fixes

* fix

* fix IT
clintropolis authored Jan 9, 2025
1 parent 9906544 commit 12e88b7
Showing 1,053 changed files with 5,487 additions and 47,702 deletions.
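
For context on the first two bullets in the commit message: the two legacy properties are now read only so that a warning can be logged, and they no longer change query behavior. The snippet below is a minimal, hypothetical sketch of that warn-only check; the class name LegacyNullHandlingConfigCheck and the method warnIfLegacyModeRequested are illustrative assumptions rather than Druid's actual wiring, though the Logger class is the same one imported elsewhere in this diff.

import org.apache.druid.java.util.common.logger.Logger;

import java.util.Properties;

// Hypothetical sketch only: the legacy null-handling properties are read solely to
// warn the operator and have no effect on query semantics. Class and method names
// here are assumptions for illustration, not part of Druid's actual code.
public class LegacyNullHandlingConfigCheck
{
  private static final Logger LOG = new Logger(LegacyNullHandlingConfigCheck.class);

  public static void warnIfLegacyModeRequested(Properties properties)
  {
    if ("true".equals(properties.getProperty("druid.generic.useDefaultValueForNull"))) {
      LOG.warn("druid.generic.useDefaultValueForNull=true is no longer supported; SQL-compatible null handling is always used.");
    }
    if ("false".equals(properties.getProperty("druid.generic.useThreeValueLogicForNativeFilters"))) {
      LOG.warn("druid.generic.useThreeValueLogicForNativeFilters=false is no longer supported; three-valued logic is always used.");
    }
  }
}

In practice the same outcome is reached simply by leaving both properties unset, since the removed modes can no longer be enabled.
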
@@ -25,7 +25,6 @@
 import org.apache.druid.collections.bitmap.ImmutableBitmap;
 import org.apache.druid.collections.bitmap.MutableBitmap;
 import org.apache.druid.collections.bitmap.RoaringBitmapFactory;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.extendedset.intset.ConciseSetUtils;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.query.filter.BoundDimFilter;
@@ -60,10 +59,6 @@
 @Measurement(iterations = 10)
 public class BoundFilterBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   private static final int START_INT = 1_000_000_000;
   private static final int END_INT = ConciseSetUtils.MAX_ALLOWED_INTEGER;

@@ -20,7 +20,6 @@
 package org.apache.druid.benchmark;
 
 import org.apache.datasketches.hll.HllSketch;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.BufferAggregator;
 import org.apache.druid.query.aggregation.datasketches.hll.HllSketchMergeAggregatorFactory;
@@ -55,10 +54,6 @@
 @State(Scope.Benchmark)
 public class DataSketchesHllBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   private final AggregatorFactory aggregatorFactory = new HllSketchMergeAggregatorFactory(
       "hll",
       "hll",

@@ -20,7 +20,6 @@
 package org.apache.druid.benchmark;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.data.input.ColumnsFilter;
 import org.apache.druid.data.input.InputEntityReader;
 import org.apache.druid.data.input.InputRow;
@@ -83,10 +82,6 @@ public class DelimitedInputFormatBenchmark
       "delta"
   );
 
-  static {
-    NullHandling.initializeForTests();
-  }
-
   @Param({"false", "true"})
   private boolean fromHeader;

@@ -24,7 +24,6 @@
 import org.apache.druid.collections.bitmap.ImmutableBitmap;
 import org.apache.druid.collections.bitmap.MutableBitmap;
 import org.apache.druid.collections.bitmap.RoaringBitmapFactory;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.ByteBufferUtils;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.segment.data.BitmapSerdeFactory;
@@ -64,10 +63,6 @@
 @Measurement(iterations = 10)
 public class DictionaryEncodedStringIndexSupplierBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   @State(Scope.Benchmark)
   public static class BenchmarkState
   {

@@ -25,7 +25,6 @@
 import org.apache.druid.collections.bitmap.ImmutableBitmap;
 import org.apache.druid.collections.bitmap.MutableBitmap;
 import org.apache.druid.collections.bitmap.RoaringBitmapFactory;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.query.filter.ColumnIndexSelector;
 import org.apache.druid.query.filter.DruidDoublePredicate;
@@ -63,10 +62,6 @@
 @Measurement(iterations = 10)
 public class DimensionPredicateFilterBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   private static final int START_INT = 1_000_000_000;
 
   private static final DimensionPredicateFilter IS_EVEN = new DimensionPredicateFilter(

@@ -20,7 +20,6 @@
 package org.apache.druid.benchmark;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.granularity.Granularities;
 import org.apache.druid.java.util.common.io.Closer;
@@ -70,10 +69,6 @@
 @OutputTimeUnit(TimeUnit.MILLISECONDS)
 public class ExpressionAggregationBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   @Param({"1000000"})
   private int rowsPerSegment;

@@ -20,7 +20,6 @@
 package org.apache.druid.benchmark;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.granularity.Granularities;
 import org.apache.druid.java.util.common.io.Closer;
@@ -69,7 +68,6 @@
 public class ExpressionFilterBenchmark
 {
   static {
-    NullHandling.initializeForTests();
     ExpressionProcessing.initializeForTests();
   }

@@ -20,7 +20,6 @@
 package org.apache.druid.benchmark;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.granularity.Granularities;
@@ -81,7 +80,6 @@
 public class ExpressionSelectorBenchmark
 {
   static {
-    NullHandling.initializeForTests();
     ExpressionProcessing.initializeForTests();
   }

@@ -20,7 +20,6 @@
 package org.apache.druid.benchmark;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.granularity.Granularities;
 import org.apache.druid.java.util.common.io.Closer;
 import org.apache.druid.math.expr.Expr;
@@ -73,10 +72,6 @@
 @OutputTimeUnit(TimeUnit.MILLISECONDS)
 public class ExpressionVectorSelectorBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   @Param({"1000000"})
   private int rowsPerSegment;

@@ -21,7 +21,6 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.data.input.InputRow;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.FileUtils;
@@ -99,10 +98,6 @@
 @Measurement(iterations = 25)
 public class FilterPartitionBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   @Param({"750000"})
   private int rowsPerSegment;
 
@@ -548,14 +543,12 @@ public Filter toFilter()
       if (extractionFn == null) {
         return new NoBitmapSelectorFilter(dimension, value);
       } else {
-        final String valueOrNull = NullHandling.emptyToNullIfNeeded(value);
-
         final DruidPredicateFactory predicateFactory = new DruidPredicateFactory()
         {
           @Override
           public DruidObjectPredicate<String> makeStringPredicate()
           {
-            return valueOrNull == null ? DruidObjectPredicate.isNull() : DruidObjectPredicate.equalTo(valueOrNull);
+            return value == null ? DruidObjectPredicate.isNull() : DruidObjectPredicate.equalTo(value);
           }
 
           @Override

@@ -23,7 +23,6 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableMap;
 import org.apache.druid.benchmark.query.QueryBenchmarkUtil;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.data.input.InputRow;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.FileUtils;
@@ -104,10 +103,6 @@
 @Measurement(iterations = 25)
 public class FilteredAggregatorBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   @Param({"75000"})
   private int rowsPerSegment;

@@ -20,7 +20,6 @@
 package org.apache.druid.benchmark;
 
 import org.apache.commons.math3.distribution.NormalDistribution;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.query.aggregation.histogram.FixedBucketsHistogram;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
@@ -47,10 +46,6 @@
 @OutputTimeUnit(TimeUnit.MICROSECONDS)
 public class FixedHistogramAddBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   private static final int LOWER_LIMIT = 0;
   private static final int UPPER_LIMIT = 100000;

@@ -19,7 +19,6 @@
 
 package org.apache.druid.benchmark;
 
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.query.aggregation.histogram.FixedBucketsHistogram;
 import org.openjdk.jmh.annotations.Benchmark;
@@ -47,10 +46,6 @@
 @OutputTimeUnit(TimeUnit.MICROSECONDS)
 public class FixedHistogramBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   private static final Logger log = new Logger(FixedHistogramBenchmark.class);
 
   private static final int LOWER_LIMIT = 0;

@@ -21,7 +21,6 @@
 
 import com.google.common.base.Preconditions;
 import org.apache.druid.benchmark.compression.EncodingSizeProfiler;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.io.smoosh.FileSmoosher;
@@ -76,10 +75,6 @@
 @State(Scope.Benchmark)
 public class FrontCodedIndexedBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   @Param({"10000", "100000"})
   public int numElements;

@@ -20,7 +20,6 @@
 package org.apache.druid.benchmark;
 
 import com.google.common.primitives.Ints;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.io.smoosh.FileSmoosher;
 import org.apache.druid.java.util.common.io.smoosh.SmooshedFileMapper;
@@ -61,10 +60,6 @@
 @State(Scope.Benchmark)
 public class GenericIndexedBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   public static final int ITERATIONS = 10000;
 
   static final ObjectStrategy<byte[]> BYTE_ARRAY_STRATEGY = new ObjectStrategy<>()

@@ -22,7 +22,6 @@
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableMap;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.guice.BuiltInTypesModule;
 import org.apache.druid.jackson.AggregatorsModule;
 import org.apache.druid.java.util.common.DateTimes;
@@ -66,9 +65,7 @@
 @Measurement(iterations = 5)
 public class GroupByDeserializationBenchmark
 {
-
   static {
-    NullHandling.initializeForTests();
     BuiltInTypesModule.registerHandlersAndSerde();
     AggregatorsModule.registerComplexMetricsAndSerde();
   }

@@ -28,7 +28,6 @@
 import org.apache.druid.collections.DefaultBlockingPool;
 import org.apache.druid.collections.NonBlockingPool;
 import org.apache.druid.collections.StupidPool;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.data.input.InputRow;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.FileUtils;
@@ -109,10 +108,6 @@
 @Measurement(iterations = 30)
 public class GroupByTypeInterfaceBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   private static final SegmentId Q_INDEX_SEGMENT_ID = SegmentId.dummy("qIndex");
 
   @Param({"4"})

@@ -24,7 +24,6 @@
 import org.apache.druid.collections.bitmap.ImmutableBitmap;
 import org.apache.druid.collections.bitmap.MutableBitmap;
 import org.apache.druid.collections.bitmap.RoaringBitmapFactory;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.query.filter.ColumnIndexSelector;
 import org.apache.druid.query.filter.InDimFilter;
@@ -59,10 +58,6 @@
 @Measurement(iterations = 3)
 public class InFilterBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   private static final int START_INT = 10_000_000;
 
   private InDimFilter inFilter;

@@ -21,7 +21,6 @@
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.google.common.collect.ImmutableMap;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.data.input.InputRow;
 import org.apache.druid.data.input.MapBasedInputRow;
 import org.apache.druid.java.util.common.StringUtils;
@@ -52,10 +51,6 @@
 @State(Scope.Benchmark)
 public class IncrementalIndexRowTypeBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   @Param({"250000"})
   private int rowsPerSegment;

@@ -22,7 +22,6 @@
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
-import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.StringUtils;
@@ -92,10 +91,6 @@
 @Measurement(iterations = 5)
 public class IndexedTableJoinCursorBenchmark
 {
-  static {
-    NullHandling.initializeForTests();
-  }
-
   private static final List<Set<String>> PROJECTIONS = ImmutableList.of(
       // 0 string key rhs
       ImmutableSet.of("j0.stringKey"),
