diff --git a/core/trino-main/src/main/java/io/trino/operator/BigintGroupByHash.java b/core/trino-main/src/main/java/io/trino/operator/BigintGroupByHash.java index 84b559485697..9d8d2b5a75c5 100644 --- a/core/trino-main/src/main/java/io/trino/operator/BigintGroupByHash.java +++ b/core/trino-main/src/main/java/io/trino/operator/BigintGroupByHash.java @@ -656,7 +656,7 @@ public GroupByIdBlock getResult() return new GroupByIdBlock( nextGroupId, - new RunLengthEncodedBlock( + RunLengthEncodedBlock.create( BIGINT.createFixedSizeBlockBuilder(1).writeLong(groupId).build(), block.getPositionCount())); } diff --git a/core/trino-main/src/main/java/io/trino/operator/ChangeOnlyUpdatedColumnsMergeProcessor.java b/core/trino-main/src/main/java/io/trino/operator/ChangeOnlyUpdatedColumnsMergeProcessor.java index 05396a4f1b3c..063bae723b07 100644 --- a/core/trino-main/src/main/java/io/trino/operator/ChangeOnlyUpdatedColumnsMergeProcessor.java +++ b/core/trino-main/src/main/java/io/trino/operator/ChangeOnlyUpdatedColumnsMergeProcessor.java @@ -79,7 +79,7 @@ public Page transformPage(Page inputPage) Block operationChannelBlock = mergeRow.getField(mergeRow.getFieldCount() - 2); builder.add(operationChannelBlock); builder.add(inputPage.getBlock(rowIdChannel)); - builder.add(new RunLengthEncodedBlock(INSERT_FROM_UPDATE_BLOCK, positionCount)); + builder.add(RunLengthEncodedBlock.create(INSERT_FROM_UPDATE_BLOCK, positionCount)); Page result = new Page(builder.toArray(Block[]::new)); diff --git a/core/trino-main/src/main/java/io/trino/operator/GroupIdOperator.java b/core/trino-main/src/main/java/io/trino/operator/GroupIdOperator.java index 88d4716d9049..978ab4636dbc 100644 --- a/core/trino-main/src/main/java/io/trino/operator/GroupIdOperator.java +++ b/core/trino-main/src/main/java/io/trino/operator/GroupIdOperator.java @@ -178,14 +178,14 @@ private Page generateNextPage() for (int i = 0; i < groupingSetInputs[currentGroupingSet].length; i++) { if (groupingSetInputs[currentGroupingSet][i] 
== -1) { - outputBlocks[i] = new RunLengthEncodedBlock(nullBlocks[i], currentPage.getPositionCount()); + outputBlocks[i] = RunLengthEncodedBlock.create(nullBlocks[i], currentPage.getPositionCount()); } else { outputBlocks[i] = currentPage.getBlock(groupingSetInputs[currentGroupingSet][i]); } } - outputBlocks[outputBlocks.length - 1] = new RunLengthEncodedBlock(groupIdBlocks[currentGroupingSet], currentPage.getPositionCount()); + outputBlocks[outputBlocks.length - 1] = RunLengthEncodedBlock.create(groupIdBlocks[currentGroupingSet], currentPage.getPositionCount()); currentGroupingSet = (currentGroupingSet + 1) % groupingSetInputs.length; Page outputPage = new Page(currentPage.getPositionCount(), outputBlocks); diff --git a/core/trino-main/src/main/java/io/trino/operator/MarkDistinctHash.java b/core/trino-main/src/main/java/io/trino/operator/MarkDistinctHash.java index 859d36ff2b8e..119d480742ff 100644 --- a/core/trino-main/src/main/java/io/trino/operator/MarkDistinctHash.java +++ b/core/trino-main/src/main/java/io/trino/operator/MarkDistinctHash.java @@ -67,12 +67,12 @@ private Block processNextGroupIds(GroupByIdBlock ids) // must have > 1 positions to benefit from using a RunLengthEncoded block if (nextDistinctId == ids.getGroupCount()) { // no new distinct positions - return new RunLengthEncodedBlock(BooleanType.createBlockForSingleNonNullValue(false), positions); + return RunLengthEncodedBlock.create(BooleanType.createBlockForSingleNonNullValue(false), positions); } if (nextDistinctId + positions == ids.getGroupCount()) { // all positions are distinct nextDistinctId = ids.getGroupCount(); - return new RunLengthEncodedBlock(BooleanType.createBlockForSingleNonNullValue(true), positions); + return RunLengthEncodedBlock.create(BooleanType.createBlockForSingleNonNullValue(true), positions); } } byte[] distinctMask = new byte[positions]; diff --git a/core/trino-main/src/main/java/io/trino/operator/MultiChannelGroupByHash.java 
b/core/trino-main/src/main/java/io/trino/operator/MultiChannelGroupByHash.java index fcfe04ea9d5f..7d0a776b4d75 100644 --- a/core/trino-main/src/main/java/io/trino/operator/MultiChannelGroupByHash.java +++ b/core/trino-main/src/main/java/io/trino/operator/MultiChannelGroupByHash.java @@ -972,7 +972,7 @@ public GroupByIdBlock getResult() return new GroupByIdBlock( nextGroupId, - new RunLengthEncodedBlock( + RunLengthEncodedBlock.create( BIGINT.createFixedSizeBlockBuilder(1).writeLong(groupId).build(), page.getPositionCount())); } diff --git a/core/trino-main/src/main/java/io/trino/operator/join/NestedLoopJoinOperator.java b/core/trino-main/src/main/java/io/trino/operator/join/NestedLoopJoinOperator.java index f70775367d64..20ff30a1ad35 100644 --- a/core/trino-main/src/main/java/io/trino/operator/join/NestedLoopJoinOperator.java +++ b/core/trino-main/src/main/java/io/trino/operator/join/NestedLoopJoinOperator.java @@ -392,7 +392,7 @@ public Page next() // For the page with less rows, create RLE blocks and add them to the blocks array for (int i = 0; i < smallPageOutputBlocks.length; i++) { Block block = smallPageOutputBlocks[i].getSingleValueBlock(rowIndex); - resultBlockBuffer[indexForRleBlocks + i] = new RunLengthEncodedBlock(block, largePagePositionCount); + resultBlockBuffer[indexForRleBlocks + i] = RunLengthEncodedBlock.create(block, largePagePositionCount); } // Page constructor will create a copy of the block buffer (and must for correctness) return new Page(largePagePositionCount, resultBlockBuffer); diff --git a/core/trino-main/src/main/java/io/trino/operator/output/BytePositionsAppender.java b/core/trino-main/src/main/java/io/trino/operator/output/BytePositionsAppender.java index b335280dc20f..c75c3ddc7ad0 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/BytePositionsAppender.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/BytePositionsAppender.java @@ -94,9 +94,8 @@ public void append(IntArrayList positions, Block 
block) } @Override - public void appendRle(RunLengthEncodedBlock block) + public void appendRle(Block block, int rlePositionCount) { - int rlePositionCount = block.getPositionCount(); if (rlePositionCount == 0) { return; } @@ -124,7 +123,7 @@ public Block build() result = new ByteArrayBlock(positionCount, hasNullValue ? Optional.of(valueIsNull) : Optional.empty(), values); } else { - result = new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + result = RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } reset(); return result; diff --git a/core/trino-main/src/main/java/io/trino/operator/output/Int128PositionsAppender.java b/core/trino-main/src/main/java/io/trino/operator/output/Int128PositionsAppender.java index f3d86ab142ae..1b3cb4cd641e 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/Int128PositionsAppender.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/Int128PositionsAppender.java @@ -101,9 +101,8 @@ public void append(IntArrayList positions, Block block) } @Override - public void appendRle(RunLengthEncodedBlock block) + public void appendRle(Block block, int rlePositionCount) { - int rlePositionCount = block.getPositionCount(); if (rlePositionCount == 0) { return; } @@ -137,7 +136,7 @@ public Block build() result = new Int128ArrayBlock(positionCount, hasNullValue ? 
Optional.of(valueIsNull) : Optional.empty(), values); } else { - result = new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + result = RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } reset(); return result; diff --git a/core/trino-main/src/main/java/io/trino/operator/output/Int96PositionsAppender.java b/core/trino-main/src/main/java/io/trino/operator/output/Int96PositionsAppender.java index e8eaa32283c7..372eb2c417b7 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/Int96PositionsAppender.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/Int96PositionsAppender.java @@ -98,9 +98,8 @@ public void append(IntArrayList positions, Block block) } @Override - public void appendRle(RunLengthEncodedBlock block) + public void appendRle(Block block, int rlePositionCount) { - int rlePositionCount = block.getPositionCount(); if (rlePositionCount == 0) { return; } @@ -132,7 +131,7 @@ public Block build() result = new Int96ArrayBlock(positionCount, hasNullValue ? 
Optional.of(valueIsNull) : Optional.empty(), high, low); } else { - result = new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + result = RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } reset(); return result; diff --git a/core/trino-main/src/main/java/io/trino/operator/output/IntPositionsAppender.java b/core/trino-main/src/main/java/io/trino/operator/output/IntPositionsAppender.java index 0a523c567eb5..e7f7272abdb4 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/IntPositionsAppender.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/IntPositionsAppender.java @@ -94,9 +94,8 @@ public void append(IntArrayList positions, Block block) } @Override - public void appendRle(RunLengthEncodedBlock block) + public void appendRle(Block block, int rlePositionCount) { - int rlePositionCount = block.getPositionCount(); if (rlePositionCount == 0) { return; } @@ -124,7 +123,7 @@ public Block build() result = new IntArrayBlock(positionCount, hasNullValue ? 
Optional.of(valueIsNull) : Optional.empty(), values); } else { - result = new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + result = RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } reset(); return result; diff --git a/core/trino-main/src/main/java/io/trino/operator/output/LongPositionsAppender.java b/core/trino-main/src/main/java/io/trino/operator/output/LongPositionsAppender.java index 558c2c474562..95d5106bf6b9 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/LongPositionsAppender.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/LongPositionsAppender.java @@ -94,9 +94,8 @@ public void append(IntArrayList positions, Block block) } @Override - public void appendRle(RunLengthEncodedBlock block) + public void appendRle(Block block, int rlePositionCount) { - int rlePositionCount = block.getPositionCount(); if (rlePositionCount == 0) { return; } @@ -124,7 +123,7 @@ public Block build() result = new LongArrayBlock(positionCount, hasNullValue ? 
Optional.of(valueIsNull) : Optional.empty(), values); } else { - result = new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + result = RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } reset(); return result; diff --git a/core/trino-main/src/main/java/io/trino/operator/output/PagePartitioner.java b/core/trino-main/src/main/java/io/trino/operator/output/PagePartitioner.java index 836487a40ea4..f68d1a867918 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/PagePartitioner.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/PagePartitioner.java @@ -459,7 +459,7 @@ private Page getPartitionFunctionArguments(Page page) for (int i = 0; i < blocks.length; i++) { int channel = partitionChannels[i]; if (channel < 0) { - blocks[i] = new RunLengthEncodedBlock(partitionConstantBlocks[i], page.getPositionCount()); + blocks[i] = RunLengthEncodedBlock.create(partitionConstantBlocks[i], page.getPositionCount()); } else { blocks[i] = page.getBlock(channel); diff --git a/core/trino-main/src/main/java/io/trino/operator/output/PositionsAppender.java b/core/trino-main/src/main/java/io/trino/operator/output/PositionsAppender.java index 8a8697b77f04..1fda303a9df8 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/PositionsAppender.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/PositionsAppender.java @@ -14,7 +14,6 @@ package io.trino.operator.output; import io.trino.spi.block.Block; -import io.trino.spi.block.RunLengthEncodedBlock; import it.unimi.dsi.fastutil.ints.IntArrayList; public interface PositionsAppender @@ -22,12 +21,11 @@ public interface PositionsAppender void append(IntArrayList positions, Block source); /** - * Appends value from the {@code rleBlock} to this appender {@link RunLengthEncodedBlock#getPositionCount()} times. + * Appends the specified value positionCount times. 
* The result is the same as with using {@link PositionsAppender#append(IntArrayList, Block)} with - * positions list [0...{@link RunLengthEncodedBlock#getPositionCount()} -1] - * but with possible performance optimizations for {@link RunLengthEncodedBlock}. + * positions list [0...positionCount -1] but with possible performance optimizations. */ - void appendRle(RunLengthEncodedBlock rleBlock); + void appendRle(Block value, int rlePositionCount); /** * Creates the block from the appender data. diff --git a/core/trino-main/src/main/java/io/trino/operator/output/RleAwarePositionsAppender.java b/core/trino-main/src/main/java/io/trino/operator/output/RleAwarePositionsAppender.java index b007a906a3e5..c77479011cd7 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/RleAwarePositionsAppender.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/RleAwarePositionsAppender.java @@ -60,31 +60,32 @@ public void append(IntArrayList positions, Block source) } @Override - public void appendRle(RunLengthEncodedBlock source) + public void appendRle(Block value, int positionCount) { - if (source.getPositionCount() == 0) { + if (positionCount == 0) { return; } + checkArgument(value.getPositionCount() == 1, "Expected value to contain a single position but has %d positions".formatted(value.getPositionCount())); if (rlePositionCount == 0) { // initial empty state, switch to RLE state - rleValue = source.getValue(); - rlePositionCount = source.getPositionCount(); + rleValue = value; + rlePositionCount = positionCount; } else if (rleValue != null) { // we are in the RLE state - if (equalOperator.equalNullSafe(rleValue, 0, source.getValue(), 0)) { + if (equalOperator.equalNullSafe(rleValue, 0, value, 0)) { // the values match. we can just add positions. - this.rlePositionCount += source.getPositionCount(); + this.rlePositionCount += positionCount; return; } // RLE values do not match. 
switch to flat state switchToFlat(); - delegate.appendRle(source); + delegate.appendRle(value, positionCount); } else { // flat state - delegate.appendRle(source); + delegate.appendRle(value, positionCount); } } @@ -93,7 +94,7 @@ public Block build() { Block result; if (rleValue != null) { - result = new RunLengthEncodedBlock(rleValue, rlePositionCount); + result = RunLengthEncodedBlock.create(rleValue, rlePositionCount); } else { result = delegate.build(); @@ -127,7 +128,7 @@ private void switchToFlat() { if (rleValue != null) { // we are in the RLE state, flatten all RLE blocks - delegate.appendRle(new RunLengthEncodedBlock(rleValue, rlePositionCount)); + delegate.appendRle(rleValue, rlePositionCount); rleValue = null; } rlePositionCount = NO_RLE; diff --git a/core/trino-main/src/main/java/io/trino/operator/output/RowPositionsAppender.java b/core/trino-main/src/main/java/io/trino/operator/output/RowPositionsAppender.java index f64a5ab91b76..70b4d334691c 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/RowPositionsAppender.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/RowPositionsAppender.java @@ -96,11 +96,10 @@ public void append(IntArrayList positions, Block block) } @Override - public void appendRle(RunLengthEncodedBlock rleBlock) + public void appendRle(Block value, int rlePositionCount) { - int rlePositionCount = rleBlock.getPositionCount(); ensureCapacity(rlePositionCount); - AbstractRowBlock sourceRowBlock = (AbstractRowBlock) rleBlock.getValue(); + AbstractRowBlock sourceRowBlock = (AbstractRowBlock) value; if (sourceRowBlock.isNull(0)) { // append rlePositionCount nulls Arrays.fill(rowIsNull, positionCount, positionCount + rlePositionCount, true); @@ -111,7 +110,7 @@ public void appendRle(RunLengthEncodedBlock rleBlock) List fieldBlocks = sourceRowBlock.getChildren(); int fieldPosition = sourceRowBlock.getFieldBlockOffset(0); for (int i = 0; i < fieldAppenders.length; i++) { - fieldAppenders[i].appendRle(new 
RunLengthEncodedBlock(fieldBlocks.get(i).getSingleValueBlock(fieldPosition), rlePositionCount)); + fieldAppenders[i].appendRle(fieldBlocks.get(i).getSingleValueBlock(fieldPosition), rlePositionCount); } hasNonNullRow = true; } @@ -132,7 +131,7 @@ public Block build() } else { Block nullRowBlock = fromFieldBlocks(1, Optional.of(new boolean[] {true}), fieldBlocks); - result = new RunLengthEncodedBlock(nullRowBlock, positionCount); + result = RunLengthEncodedBlock.create(nullRowBlock, positionCount); } reset(); diff --git a/core/trino-main/src/main/java/io/trino/operator/output/ShortPositionsAppender.java b/core/trino-main/src/main/java/io/trino/operator/output/ShortPositionsAppender.java index 6cd8192d0a9a..0b81d8e6a7d7 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/ShortPositionsAppender.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/ShortPositionsAppender.java @@ -94,9 +94,8 @@ public void append(IntArrayList positions, Block block) } @Override - public void appendRle(RunLengthEncodedBlock block) + public void appendRle(Block block, int rlePositionCount) { - int rlePositionCount = block.getPositionCount(); if (rlePositionCount == 0) { return; } @@ -124,7 +123,7 @@ public Block build() result = new ShortArrayBlock(positionCount, hasNullValue ? 
Optional.of(valueIsNull) : Optional.empty(), values); } else { - result = new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + result = RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } reset(); return result; diff --git a/core/trino-main/src/main/java/io/trino/operator/output/SlicePositionsAppender.java b/core/trino-main/src/main/java/io/trino/operator/output/SlicePositionsAppender.java index 98cfda83d179..0cf807a9d6e2 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/SlicePositionsAppender.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/SlicePositionsAppender.java @@ -121,9 +121,8 @@ public void append(IntArrayList positions, Block block) } @Override - public void appendRle(RunLengthEncodedBlock block) + public void appendRle(Block block, int rlePositionCount) { - int rlePositionCount = block.getPositionCount(); if (rlePositionCount == 0) { return; } @@ -154,7 +153,7 @@ public Block build() hasNullValue ? Optional.of(valueIsNull) : Optional.empty()); } else { - result = new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + result = RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } reset(); return result; diff --git a/core/trino-main/src/main/java/io/trino/operator/output/TypedPositionsAppender.java b/core/trino-main/src/main/java/io/trino/operator/output/TypedPositionsAppender.java index 764309454b84..4aa446f3a66c 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/TypedPositionsAppender.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/TypedPositionsAppender.java @@ -15,7 +15,6 @@ import io.trino.spi.block.Block; import io.trino.spi.block.BlockBuilder; -import io.trino.spi.block.RunLengthEncodedBlock; import io.trino.spi.type.Type; import it.unimi.dsi.fastutil.ints.IntArrayList; import org.openjdk.jol.info.ClassLayout; @@ -53,9 +52,9 @@ public void append(IntArrayList positions, Block source) } @Override - public void appendRle(RunLengthEncodedBlock 
block) + public void appendRle(Block block, int rlePositionCount) { - for (int i = 0; i < block.getPositionCount(); i++) { + for (int i = 0; i < rlePositionCount; i++) { type.appendTo(block, 0, blockBuilder); } } diff --git a/core/trino-main/src/main/java/io/trino/operator/output/UnnestingPositionsAppender.java b/core/trino-main/src/main/java/io/trino/operator/output/UnnestingPositionsAppender.java index 86bbbb98bee3..88cdd3382f14 100644 --- a/core/trino-main/src/main/java/io/trino/operator/output/UnnestingPositionsAppender.java +++ b/core/trino-main/src/main/java/io/trino/operator/output/UnnestingPositionsAppender.java @@ -19,14 +19,10 @@ import it.unimi.dsi.fastutil.ints.IntArrayList; import org.openjdk.jol.info.ClassLayout; -import static com.google.common.base.Preconditions.checkArgument; import static java.util.Objects.requireNonNull; /** * Dispatches the {@link #append} and {@link #appendRle} methods to the {@link #delegate} depending on the input {@link Block} class. - * The {@link Block} is flattened if necessary so that the {@link #delegate} {@link PositionsAppender#append(IntArrayList, Block)} - * always gets flat {@link Block} and {@link PositionsAppender#appendRle(RunLengthEncodedBlock)} always gets {@link RunLengthEncodedBlock} - * with {@link RunLengthEncodedBlock#getValue()} being flat {@link Block}. 
*/ public class UnnestingPositionsAppender implements PositionsAppender @@ -47,7 +43,7 @@ public void append(IntArrayList positions, Block source) return; } if (source instanceof RunLengthEncodedBlock) { - delegate.appendRle(flatten((RunLengthEncodedBlock) source, positions.size())); + delegate.appendRle(((RunLengthEncodedBlock) source).getValue(), positions.size()); } else if (source instanceof DictionaryBlock) { appendDictionary(positions, (DictionaryBlock) source); @@ -58,12 +54,12 @@ else if (source instanceof DictionaryBlock) { } @Override - public void appendRle(RunLengthEncodedBlock source) + public void appendRle(Block block, int rlePositionCount) { - if (source.getPositionCount() == 0) { + if (rlePositionCount == 0) { return; } - delegate.appendRle(flatten(source, source.getPositionCount())); + delegate.appendRle(block, rlePositionCount); } @Override @@ -86,30 +82,7 @@ public long getSizeInBytes() private void appendDictionary(IntArrayList positions, DictionaryBlock source) { - Block dictionary = source.getDictionary(); - - while (dictionary instanceof RunLengthEncodedBlock || dictionary instanceof DictionaryBlock) { - if (dictionary instanceof RunLengthEncodedBlock) { - // if at some level dictionary contains only a single value then it can be flattened to rle - appendRle(new RunLengthEncodedBlock(((RunLengthEncodedBlock) dictionary).getValue(), positions.size())); - return; - } - - // dictionary is a nested dictionary. 
we need to remap the ids - DictionaryBlock nestedDictionary = (DictionaryBlock) dictionary; - positions = mapPositions(positions, source); - dictionary = nestedDictionary.getDictionary(); - source = nestedDictionary; - } - delegate.append(mapPositions(positions, source), dictionary); - } - - private RunLengthEncodedBlock flatten(RunLengthEncodedBlock source, int positionCount) - { - checkArgument(positionCount > 0); - Block value = source.getValue().getSingleValueBlock(0); - checkArgument(!(value instanceof DictionaryBlock) && !(value instanceof RunLengthEncodedBlock), "value must be flat but got %s", value); - return new RunLengthEncodedBlock(value, positionCount); + delegate.append(mapPositions(positions, source), source.getDictionary()); } private IntArrayList mapPositions(IntArrayList positions, DictionaryBlock block) diff --git a/core/trino-main/src/main/java/io/trino/operator/project/ConstantPageProjection.java b/core/trino-main/src/main/java/io/trino/operator/project/ConstantPageProjection.java index cddc8683a46f..fddadaeedb78 100644 --- a/core/trino-main/src/main/java/io/trino/operator/project/ConstantPageProjection.java +++ b/core/trino-main/src/main/java/io/trino/operator/project/ConstantPageProjection.java @@ -63,6 +63,6 @@ public InputChannels getInputChannels() @Override public Work project(ConnectorSession session, DriverYieldSignal yieldSignal, Page page, SelectedPositions selectedPositions) { - return new CompletedWork<>(new RunLengthEncodedBlock(value, selectedPositions.size())); + return new CompletedWork<>(RunLengthEncodedBlock.create(value, selectedPositions.size())); } } diff --git a/core/trino-main/src/main/java/io/trino/operator/project/DictionaryAwarePageProjection.java b/core/trino-main/src/main/java/io/trino/operator/project/DictionaryAwarePageProjection.java index 5f0934e52f62..1cb095b6057e 100644 --- a/core/trino-main/src/main/java/io/trino/operator/project/DictionaryAwarePageProjection.java +++ 
b/core/trino-main/src/main/java/io/trino/operator/project/DictionaryAwarePageProjection.java @@ -32,6 +32,7 @@ import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Verify.verify; +import static io.trino.spi.block.DictionaryBlock.createProjectedDictionaryBlock; import static java.util.Objects.requireNonNull; public class DictionaryAwarePageProjection @@ -161,7 +162,7 @@ private boolean processInternal() if (block instanceof RunLengthEncodedBlock) { // single value block is always considered effective, but the processing could have thrown // in that case we fallback and process again so the correct error message sent - result = new RunLengthEncodedBlock(dictionaryOutput.get(), selectedPositions.size()); + result = RunLengthEncodedBlock.create(dictionaryOutput.get(), selectedPositions.size()); return true; } @@ -169,7 +170,7 @@ private boolean processInternal() DictionaryBlock dictionaryBlock = (DictionaryBlock) block; // if dictionary was processed, produce a dictionary block; otherwise do normal processing int[] outputIds = filterDictionaryIds(dictionaryBlock, selectedPositions); - result = new DictionaryBlock(selectedPositions.size(), dictionaryOutput.get(), outputIds, false, sourceIdFunction.apply(dictionaryBlock)); + result = createProjectedDictionaryBlock(selectedPositions.size(), dictionaryOutput.get(), outputIds, sourceIdFunction.apply(dictionaryBlock)); return true; } diff --git a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayCombinationsFunction.java b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayCombinationsFunction.java index 625c3c730d6d..689447160644 100644 --- a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayCombinationsFunction.java +++ b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayCombinationsFunction.java @@ -81,7 +81,7 @@ public static Block combinations( int[] offsets = new int[combinationCount + 1]; setAll(offsets, i -> i * combinationLength); 
- return ArrayBlock.fromElementBlock(combinationCount, Optional.empty(), offsets, new DictionaryBlock(array, ids)); + return ArrayBlock.fromElementBlock(combinationCount, Optional.empty(), offsets, DictionaryBlock.create(ids.length, array, ids)); } @VisibleForTesting diff --git a/core/trino-main/src/main/java/io/trino/operator/unnest/ReplicatedBlockBuilder.java b/core/trino-main/src/main/java/io/trino/operator/unnest/ReplicatedBlockBuilder.java index 346f21e418f0..7c566eec0f8a 100644 --- a/core/trino-main/src/main/java/io/trino/operator/unnest/ReplicatedBlockBuilder.java +++ b/core/trino-main/src/main/java/io/trino/operator/unnest/ReplicatedBlockBuilder.java @@ -42,6 +42,6 @@ public Block buildOutputBlock(int[] outputEntriesPerPosition, int offset, int in fromPosition = toPosition; } - return new DictionaryBlock(outputRowCount, source, ids); + return DictionaryBlock.create(outputRowCount, source, ids); } } diff --git a/core/trino-main/src/main/java/io/trino/operator/unnest/UnnestBlockBuilder.java b/core/trino-main/src/main/java/io/trino/operator/unnest/UnnestBlockBuilder.java index f57827883d88..ddf7cd8b0070 100644 --- a/core/trino-main/src/main/java/io/trino/operator/unnest/UnnestBlockBuilder.java +++ b/core/trino-main/src/main/java/io/trino/operator/unnest/UnnestBlockBuilder.java @@ -114,7 +114,7 @@ private Block buildWithNullsByDictionary( } } - return new DictionaryBlock(outputPositionCount, source, ids); + return DictionaryBlock.create(outputPositionCount, source, ids); } /** diff --git a/core/trino-main/src/main/java/io/trino/operator/window/pattern/ProjectingPagesWindowIndex.java b/core/trino-main/src/main/java/io/trino/operator/window/pattern/ProjectingPagesWindowIndex.java index dbae0774eebe..e71f70a2415d 100644 --- a/core/trino-main/src/main/java/io/trino/operator/window/pattern/ProjectingPagesWindowIndex.java +++ b/core/trino-main/src/main/java/io/trino/operator/window/pattern/ProjectingPagesWindowIndex.java @@ -244,7 +244,7 @@ public Block 
getRawBlock(int channel, int position) // projection always creates a single row block, and will not align with the blocks from the pages index, // so we use an RLE block of the same length as the raw block int rawBlockPositionCount = pagesIndex.getRawBlock(0, position(position)).getPositionCount(); - return new RunLengthEncodedBlock(compute, rawBlockPositionCount); + return RunLengthEncodedBlock.create(compute, rawBlockPositionCount); } @Override diff --git a/core/trino-main/src/test/java/io/trino/block/BlockAssertions.java b/core/trino-main/src/test/java/io/trino/block/BlockAssertions.java index 9a60b031b2c3..0d772bab2256 100644 --- a/core/trino-main/src/test/java/io/trino/block/BlockAssertions.java +++ b/core/trino-main/src/test/java/io/trino/block/BlockAssertions.java @@ -52,7 +52,6 @@ import static com.google.common.collect.ImmutableSet.toImmutableSet; import static io.airlift.slice.Slices.utf8Slice; import static io.trino.spi.block.ArrayBlock.fromElementBlock; -import static io.trino.spi.block.DictionaryId.randomDictionaryId; import static io.trino.spi.type.BigintType.BIGINT; import static io.trino.spi.type.BooleanType.BOOLEAN; import static io.trino.spi.type.DateType.DATE; @@ -122,7 +121,7 @@ public static void assertBlockEquals(Type type, Block actual, Block expected) } } - public static DictionaryBlock createRandomDictionaryBlock(Block dictionary, int positionCount) + public static Block createRandomDictionaryBlock(Block dictionary, int positionCount) { checkArgument(dictionary.getPositionCount() > 0, "dictionary position count %s is less than or equal to 0", dictionary.getPositionCount()); @@ -130,13 +129,13 @@ public static DictionaryBlock createRandomDictionaryBlock(Block dictionary, int int[] ids = IntStream.range(0, positionCount) .map(i -> random.nextInt(dictionary.getPositionCount())) .toArray(); - return new DictionaryBlock(0, positionCount, dictionary, ids, false, randomDictionaryId()); + return DictionaryBlock.create(positionCount, dictionary, 
ids); } public static RunLengthEncodedBlock createRandomRleBlock(Block block, int positionCount) { - checkArgument(block.getPositionCount() > 0, format("block positions %d is less than or equal to 0", block.getPositionCount())); - return new RunLengthEncodedBlock(block.getSingleValueBlock(random().nextInt(block.getPositionCount())), positionCount); + checkArgument(block.getPositionCount() >= 2, format("block positions %d is less than 2", block.getPositionCount())); + return (RunLengthEncodedBlock) RunLengthEncodedBlock.create(block.getSingleValueBlock(random().nextInt(block.getPositionCount())), positionCount); } public static Block createRandomBlockForType(Type type, int positionCount, float nullRate) @@ -446,7 +445,7 @@ public static Block createStringDictionaryBlock(int start, int length) for (int i = 0; i < length; i++) { ids[i] = i % dictionarySize; } - return new DictionaryBlock(builder.build(), ids); + return DictionaryBlock.create(ids.length, builder.build(), ids); } public static Block createStringArraysBlock(Iterable> values) @@ -717,7 +716,7 @@ public static Block createLongDictionaryBlock(int start, int length, int diction for (int i = 0; i < length; i++) { ids[i] = i % dictionarySize; } - return new DictionaryBlock(builder.build(), ids); + return DictionaryBlock.create(ids.length, builder.build(), ids); } public static Block createLongRepeatBlock(int value, int length) @@ -894,18 +893,18 @@ public static Block createColorSequenceBlock(int start, int end) return builder.build(); } - public static RunLengthEncodedBlock createRLEBlock(double value, int positionCount) + public static Block createRepeatedValuesBlock(double value, int positionCount) { BlockBuilder blockBuilder = DOUBLE.createBlockBuilder(null, 1); DOUBLE.writeDouble(blockBuilder, value); - return new RunLengthEncodedBlock(blockBuilder.build(), positionCount); + return RunLengthEncodedBlock.create(blockBuilder.build(), positionCount); } - public static RunLengthEncodedBlock createRLEBlock(long
value, int positionCount) + public static Block createRepeatedValuesBlock(long value, int positionCount) { BlockBuilder blockBuilder = BIGINT.createBlockBuilder(null, 1); BIGINT.writeLong(blockBuilder, value); - return new RunLengthEncodedBlock(blockBuilder.build(), positionCount); + return RunLengthEncodedBlock.create(blockBuilder.build(), positionCount); } private static Block createBlock(Type type, ValueWriter valueWriter, Iterable values) diff --git a/core/trino-main/src/test/java/io/trino/block/ColumnarTestUtils.java b/core/trino-main/src/test/java/io/trino/block/ColumnarTestUtils.java index b2b9aa764eaf..e183c431480f 100644 --- a/core/trino-main/src/test/java/io/trino/block/ColumnarTestUtils.java +++ b/core/trino-main/src/test/java/io/trino/block/ColumnarTestUtils.java @@ -118,10 +118,10 @@ private static Block copyBlock(Block block) return BLOCK_ENCODING_SERDE.readBlock(sliceOutput.slice().getInput()); } - public static DictionaryBlock createTestDictionaryBlock(Block block) + public static Block createTestDictionaryBlock(Block block) { int[] dictionaryIndexes = createTestDictionaryIndexes(block.getPositionCount()); - return new DictionaryBlock(dictionaryIndexes.length, block, dictionaryIndexes); + return DictionaryBlock.create(dictionaryIndexes.length, block, dictionaryIndexes); } public static T[] createTestDictionaryExpectedValues(T[] expectedValues) @@ -157,6 +157,6 @@ public static T[] createTestRleExpectedValues(T[] expectedValues, int positi public static RunLengthEncodedBlock createTestRleBlock(Block block, int position) { - return new RunLengthEncodedBlock(block.getRegion(position, 1), 10); + return (RunLengthEncodedBlock) RunLengthEncodedBlock.create(block.getRegion(position, 1), 10); } } diff --git a/core/trino-main/src/test/java/io/trino/block/TestColumnarArray.java b/core/trino-main/src/test/java/io/trino/block/TestColumnarArray.java index 3888a13ced82..de11862d45f1 100644 --- a/core/trino-main/src/test/java/io/trino/block/TestColumnarArray.java 
+++ b/core/trino-main/src/test/java/io/trino/block/TestColumnarArray.java @@ -18,7 +18,6 @@ import io.trino.spi.block.Block; import io.trino.spi.block.BlockBuilder; import io.trino.spi.block.ColumnarArray; -import io.trino.spi.block.DictionaryBlock; import io.trino.spi.block.RunLengthEncodedBlock; import io.trino.spi.type.ArrayType; import org.testng.annotations.Test; @@ -86,7 +85,7 @@ private static void verifyBlock(Block block, T[] expectedValues) private static void assertDictionaryBlock(Block block, T[] expectedValues) { - DictionaryBlock dictionaryBlock = createTestDictionaryBlock(block); + Block dictionaryBlock = createTestDictionaryBlock(block); T[] expectedDictionaryValues = createTestDictionaryExpectedValues(expectedValues); assertBlock(dictionaryBlock, expectedDictionaryValues); diff --git a/core/trino-main/src/test/java/io/trino/block/TestColumnarMap.java b/core/trino-main/src/test/java/io/trino/block/TestColumnarMap.java index d45e944ab9a0..2d316822f463 100644 --- a/core/trino-main/src/test/java/io/trino/block/TestColumnarMap.java +++ b/core/trino-main/src/test/java/io/trino/block/TestColumnarMap.java @@ -18,7 +18,6 @@ import io.trino.spi.block.Block; import io.trino.spi.block.BlockBuilder; import io.trino.spi.block.ColumnarMap; -import io.trino.spi.block.DictionaryBlock; import io.trino.spi.block.MapBlockBuilder; import io.trino.spi.block.RunLengthEncodedBlock; import io.trino.spi.type.MapType; @@ -94,7 +93,7 @@ private static void verifyBlock(Block block, Slice[][][] expectedValues) private static void assertDictionaryBlock(Block block, Slice[][][] expectedValues) { - DictionaryBlock dictionaryBlock = createTestDictionaryBlock(block); + Block dictionaryBlock = createTestDictionaryBlock(block); Slice[][][] expectedDictionaryValues = createTestDictionaryExpectedValues(expectedValues); assertBlock(dictionaryBlock, expectedDictionaryValues); diff --git a/core/trino-main/src/test/java/io/trino/block/TestColumnarRow.java 
b/core/trino-main/src/test/java/io/trino/block/TestColumnarRow.java index f65db3127c0b..6c39000aef6e 100644 --- a/core/trino-main/src/test/java/io/trino/block/TestColumnarRow.java +++ b/core/trino-main/src/test/java/io/trino/block/TestColumnarRow.java @@ -19,7 +19,6 @@ import io.trino.spi.block.BlockBuilder; import io.trino.spi.block.BlockBuilderStatus; import io.trino.spi.block.ColumnarRow; -import io.trino.spi.block.DictionaryBlock; import io.trino.spi.block.RowBlockBuilder; import io.trino.spi.block.RunLengthEncodedBlock; import org.testng.annotations.Test; @@ -89,7 +88,7 @@ private static void verifyBlock(Block block, T[] expectedValues) private static void assertDictionaryBlock(Block block, T[] expectedValues) { - DictionaryBlock dictionaryBlock = createTestDictionaryBlock(block); + Block dictionaryBlock = createTestDictionaryBlock(block); T[] expectedDictionaryValues = createTestDictionaryExpectedValues(expectedValues); assertBlock(dictionaryBlock, expectedDictionaryValues); diff --git a/core/trino-main/src/test/java/io/trino/block/TestDictionaryBlock.java b/core/trino-main/src/test/java/io/trino/block/TestDictionaryBlock.java index 95519637e4ab..141c2f5a422c 100644 --- a/core/trino-main/src/test/java/io/trino/block/TestDictionaryBlock.java +++ b/core/trino-main/src/test/java/io/trino/block/TestDictionaryBlock.java @@ -28,9 +28,10 @@ import java.util.Optional; import java.util.stream.IntStream; +import static com.google.common.base.Preconditions.checkArgument; import static io.airlift.slice.SizeOf.SIZE_OF_INT; -import static io.airlift.testing.Assertions.assertInstanceOf; import static io.trino.block.BlockAssertions.createSlicesBlock; +import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; @@ -40,6 +41,44 @@ public class TestDictionaryBlock extends AbstractTestBlock { + @Test + public void 
testConstructionNoPositions() + { + Slice[] expectedValues = createExpectedValues(10); + Block dictionary = createSlicesBlock(expectedValues); + + Block block = DictionaryBlock.create(0, dictionary, new int[] {1, 5, 9}); + assertThat(block).isInstanceOf(VariableWidthBlock.class); + assertThat(block.getPositionCount()).isEqualTo(0); + } + + @Test + public void testConstructionOnePositions() + { + Slice[] expectedValues = createExpectedValues(10); + Block dictionary = createSlicesBlock(expectedValues); + + Block block = DictionaryBlock.create(1, dictionary, new int[] {1, 5, 9}); + assertThat(block).isInstanceOf(VariableWidthBlock.class); + assertThat(block.getPositionCount()).isEqualTo(1); + assertThat(block.getSlice(0, 0, block.getSliceLength(0))).isEqualTo(expectedValues[1]); + } + + @Test + public void testConstructionUnnestDictionary() + { + Slice[] expectedValues = createExpectedValues(10); + Block innerDictionary = createSlicesBlock(expectedValues); + DictionaryBlock dictionary = (DictionaryBlock) DictionaryBlock.create(4, innerDictionary, new int[] {1, 3, 5, 7}); + + Block block = DictionaryBlock.create(2, dictionary, new int[] {1, 3}); + assertThat(block).isInstanceOf(DictionaryBlock.class); + assertBlock(block, TestDictionaryBlock::createBlockBuilder, new Slice[] {expectedValues[3], expectedValues[7]}); + + Block actualDictionary = ((DictionaryBlock) block).getDictionary(); + assertThat(actualDictionary).isSameAs(innerDictionary); + } + @Test public void testSizeInBytes() { @@ -83,43 +122,6 @@ public void testCopyRegionCreatesCompactBlock() assertTrue(copyRegionDictionaryBlock.isCompact()); } - @Test - public void testCopyRegionUnwrapping() - { - Slice[] expectedValues = createExpectedValues(10); - DictionaryBlock dictionaryBlock = createDictionaryBlock(expectedValues, 100); - - // 0 and 1 position copies unwrap the dictionary - assertInstanceOf(dictionaryBlock.copyRegion(0, 0), VariableWidthBlock.class); - assertInstanceOf(dictionaryBlock.copyRegion(0, 1), 
VariableWidthBlock.class); - - // Compact dictionaries unwrap - DictionaryBlock compactSequentialDictionary = createDictionaryBlock(expectedValues, expectedValues.length); - assertTrue(compactSequentialDictionary.isCompact()); // force uniqueIds to be computed - assertInstanceOf(compactSequentialDictionary.copyRegion(0, 5), VariableWidthBlock.class); - - // Nested dictionaries unwrap, even when not compact - DictionaryBlock outerDictionary = new DictionaryBlock(compactSequentialDictionary, new int[]{1, 3, 5, 7}); - assertFalse(outerDictionary.isCompact()); // force uniqueIds to be computed - // inner dictionary is compact, and also unwraps - assertInstanceOf(outerDictionary.copyRegion(1, 2), VariableWidthBlock.class); - - // Nested dictionaries unwrap down to a single dictionary layer when ids are repeated - Block innermostRawBlock = createSlicesBlock(expectedValues); - DictionaryBlock firstDictionary = new DictionaryBlock(innermostRawBlock, new int[]{0, 1, 1, 7, 7, 5, 3}); - DictionaryBlock secondDictionary = new DictionaryBlock(firstDictionary, new int[]{3, 1, 1, 2}); - DictionaryBlock thirdDictionary = new DictionaryBlock(secondDictionary, new int[]{0, 0, 0, 2}); - // Result is still a dictionary, but only a single layer - assertInstanceOf(thirdDictionary.copyRegion(0, 2), DictionaryBlock.class); - assertInstanceOf(((DictionaryBlock) thirdDictionary.copyRegion(0, 2)).getDictionary(), VariableWidthBlock.class); - // Re-check after all dictionaries have their uniqueIds computed - assertFalse(firstDictionary.isCompact()); - assertFalse(secondDictionary.isCompact()); - assertFalse(thirdDictionary.isCompact()); - assertInstanceOf(thirdDictionary.copyRegion(0, 2), DictionaryBlock.class); - assertInstanceOf(((DictionaryBlock) thirdDictionary.copyRegion(0, 2)).getDictionary(), VariableWidthBlock.class); - } - @Test public void testCopyPositionsWithCompaction() { @@ -182,50 +184,6 @@ public void testCopyPositionsNoCompaction() assertBlock(copiedBlock.getDictionary(), 
TestDictionaryBlock::createBlockBuilder, expectedValues); } - @Test - public void testCopyPositionsUnwrapping() - { - Slice[] expectedValues = createExpectedValues(10); - DictionaryBlock dictionaryBlock = createDictionaryBlock(expectedValues, 100); - assertTrue(dictionaryBlock.isCompact()); - - // 0 and 1 position copies unwrap the dictionary - assertInstanceOf(dictionaryBlock.copyPositions(new int[0], 0, 0), VariableWidthBlock.class); - assertInstanceOf(dictionaryBlock.copyPositions(new int[1], 0, 1), VariableWidthBlock.class); - - // Dictionaries unwrap when each selected id appears only once - int[] positionsToCopy = new int[] {9, 7, 5, 3, 2, 1, 0}; - assertInstanceOf(dictionaryBlock.copyPositions(positionsToCopy, 0, positionsToCopy.length), VariableWidthBlock.class); - - // Compact dictionaries unwrap - DictionaryBlock compactSequentialDictionary = createDictionaryBlock(expectedValues, expectedValues.length); - assertTrue(compactSequentialDictionary.isCompact()); // force uniqueIds to be computed - assertInstanceOf(compactSequentialDictionary.copyPositions(positionsToCopy, 0, positionsToCopy.length), VariableWidthBlock.class); - - // Nested dictionaries unwrap, even when not compact - DictionaryBlock outerDictionary = new DictionaryBlock(compactSequentialDictionary, new int[]{1, 3, 5, 7, 9}); - assertFalse(outerDictionary.isCompact()); // force uniqueIds to be computed - positionsToCopy = new int[] {0, 3, 2}; - // inner dictionary is compact, and also unwraps - assertInstanceOf(outerDictionary.copyPositions(positionsToCopy, 0, positionsToCopy.length), VariableWidthBlock.class); - - // Nested dictionaries unwrap down to a single dictionary layer when ids are repeated - Block innermostRawBlock = createSlicesBlock(expectedValues); - DictionaryBlock firstDictionary = new DictionaryBlock(innermostRawBlock, new int[]{0, 1, 1, 7, 7, 5, 3}); - DictionaryBlock secondDictionary = new DictionaryBlock(firstDictionary, new int[]{3, 1, 1, 2}); - DictionaryBlock 
thirdDictionary = new DictionaryBlock(secondDictionary, new int[]{0, 0, 0, 2}); - // Result is still a dictionary, but only a single layer - positionsToCopy = new int[] {2, 1, 0}; - assertInstanceOf(thirdDictionary.copyPositions(positionsToCopy, 0, 2), DictionaryBlock.class); - assertInstanceOf(((DictionaryBlock) thirdDictionary.copyPositions(positionsToCopy, 0, 2)).getDictionary(), VariableWidthBlock.class); - // Re-check after all dictionaries have their uniqueIds computed - assertFalse(firstDictionary.isCompact()); - assertFalse(secondDictionary.isCompact()); - assertFalse(thirdDictionary.isCompact()); - assertInstanceOf(thirdDictionary.copyPositions(positionsToCopy, 0, 2), DictionaryBlock.class); - assertInstanceOf(((DictionaryBlock) thirdDictionary.copyPositions(positionsToCopy, 0, 2)).getDictionary(), VariableWidthBlock.class); - } - @Test public void testCompact() { @@ -245,33 +203,6 @@ public void testCompact() assertEquals(reCompactedBlock.getDictionarySourceId(), compactBlock.getDictionarySourceId()); } - @Test - public void testNestedCompact() - { - Slice[] expectedValues = createExpectedValues(10); - Block valuesBlock = createSlicesBlock(expectedValues); - DictionaryBlock deeplyNestedDictionary = new DictionaryBlock(valuesBlock, new int[] {0, 1, 2, 2, 4, 5}); - DictionaryBlock nestedDictionary = new DictionaryBlock(deeplyNestedDictionary, new int[] {0, 1, 2, 3, 4, 5}); - DictionaryBlock dictionary = new DictionaryBlock(nestedDictionary, new int[] {2, 3, 2, 0}); - DictionaryBlock dictionaryWithAllPositionsUsed = new DictionaryBlock(nestedDictionary, new int[] {0, 1, 2, 3, 4, 5}); - - assertEquals( - dictionary.getSizeInBytes(), - valuesBlock.getPositionsSizeInBytes(new boolean[] {true, false, true, false, false, false}, 2) + 4 * Integer.BYTES); - assertFalse(dictionary.isCompact()); - - assertEquals( - dictionaryWithAllPositionsUsed.getSizeInBytes(), - valuesBlock.getPositionsSizeInBytes(new boolean[] {true, true, true, false, true, true}, 5) + 6 * 
Integer.BYTES); - // dictionary is not compact (even though all positions were used) because it's unnested - assertFalse(dictionaryWithAllPositionsUsed.isCompact()); - - DictionaryBlock compactBlock = dictionary.compact(); - assertBlock(compactBlock.getDictionary(), TestDictionaryBlock::createBlockBuilder, new Slice[] {expectedValues[2], expectedValues[0]}); - assertDictionaryIds(compactBlock, 0, 0, 0, 1); - assertInstanceOf(compactBlock.getDictionary(), VariableWidthBlock.class); - } - @Test public void testCompactAllKeysReferenced() { @@ -292,7 +223,7 @@ public void testCompactAllKeysReferenced() public void testBasicGetPositions() { Slice[] expectedValues = createExpectedValues(10); - Block dictionaryBlock = new DictionaryBlock(createSlicesBlock(expectedValues), new int[] {0, 1, 2, 3, 4, 5}); + Block dictionaryBlock = DictionaryBlock.create(6, createSlicesBlock(expectedValues), new int[] {0, 1, 2, 3, 4, 5}); assertBlock(dictionaryBlock, TestDictionaryBlock::createBlockBuilder, new Slice[] { expectedValues[0], expectedValues[1], expectedValues[2], expectedValues[3], expectedValues[4], expectedValues[5]}); DictionaryId dictionaryId = ((DictionaryBlock) dictionaryBlock).getDictionarySourceId(); @@ -347,7 +278,8 @@ public void testBasicGetPositions() @Test public void testCompactGetPositions() { - DictionaryBlock block = new DictionaryBlock(createSlicesBlock(createExpectedValues(10)), new int[] {0, 1, 2, 3, 4, 5}).compact(); + DictionaryBlock block = (DictionaryBlock) DictionaryBlock.create(6, createSlicesBlock(createExpectedValues(10)), new int[] {0, 1, 2, 3, 4, 5}); + block = block.compact(); // 3, 3, 4, 5, 2, 0, 1, 1 block = (DictionaryBlock) block.getPositions(new int[] {3, 3, 4, 5, 2, 0, 1, 1}, 0, 7); @@ -398,115 +330,56 @@ public void testEstimatedDataSizeForStats() } @Test - public void testNestedDictionarySizes() + public void testDictionarySizes() { - // fixed width block - Block fixedWidthBlock = new IntArrayBlock(100, Optional.empty(), IntStream.range(0, 
100).toArray()); - assertDictionarySizeMethods(fixedWidthBlock); - assertDictionarySizeMethods(new DictionaryBlock(fixedWidthBlock, IntStream.range(0, 50).toArray())); - assertDictionarySizeMethods( - new DictionaryBlock( - new DictionaryBlock(fixedWidthBlock, IntStream.range(0, 50).toArray()), - IntStream.range(0, 10).toArray())); - - // variable width block - Block variableWidthBlock = createSlicesBlock(createExpectedValues(100)); - assertDictionarySizeMethods(variableWidthBlock); - assertDictionarySizeMethods(new DictionaryBlock(variableWidthBlock, IntStream.range(0, 50).toArray())); - assertDictionarySizeMethods( - new DictionaryBlock( - new DictionaryBlock(variableWidthBlock, IntStream.range(0, 50).toArray()), - IntStream.range(0, 10).toArray())); + assertDictionarySizeMethods(new IntArrayBlock(100, Optional.empty(), IntStream.range(0, 100).toArray())); + assertDictionarySizeMethods(createSlicesBlock(createExpectedValues(100))); } private static void assertDictionarySizeMethods(Block block) { + assertThat(block).isNotInstanceOf(DictionaryBlock.class); + int positions = block.getPositionCount(); + assertThat(positions > 0).isTrue(); int[] allIds = IntStream.range(0, positions).toArray(); - if (block instanceof DictionaryBlock) { - assertEquals( - new DictionaryBlock(block, allIds).getSizeInBytes(), - block.getSizeInBytes(), - "nested dictionary size should not be counted"); - } - else { - assertEquals(new DictionaryBlock(block, allIds).getSizeInBytes(), block.getSizeInBytes() + (Integer.BYTES * (long) positions)); - } + assertEquals(DictionaryBlock.create(allIds.length, block, allIds).getSizeInBytes(), block.getSizeInBytes() + (Integer.BYTES * (long) positions)); - if (positions > 0) { - int firstHalfLength = positions / 2; - int secondHalfLength = positions - firstHalfLength; - int[] firstHalfIds = IntStream.range(0, firstHalfLength).toArray(); - int[] secondHalfIds = IntStream.range(firstHalfLength, positions).toArray(); - - boolean[] selectedPositions = new 
boolean[positions]; - selectedPositions[0] = true; - if (block instanceof DictionaryBlock) { - assertEquals( - new DictionaryBlock(block, allIds).getPositionsSizeInBytes(selectedPositions, 1), - block.getPositionsSizeInBytes(selectedPositions, 1), - "nested dictionary blocks must not include nested id overhead"); - assertEquals( - new DictionaryBlock(block, new int[]{0}).getSizeInBytes(), - block.getPositionsSizeInBytes(selectedPositions, 1), - "nested dictionary blocks must not include nested id overhead"); - - Arrays.fill(selectedPositions, true); - assertEquals( - new DictionaryBlock(block, allIds).getPositionsSizeInBytes(selectedPositions, positions), - block.getSizeInBytes(), - "nested dictionary blocks must not include nested id overhead"); - - assertEquals( - new DictionaryBlock(block, firstHalfIds).getSizeInBytes(), - block.getRegionSizeInBytes(0, firstHalfLength), - "nested dictionary blocks must not include nested id overhead"); - assertEquals( - new DictionaryBlock(block, secondHalfIds).getSizeInBytes(), - block.getRegionSizeInBytes(firstHalfLength, secondHalfLength), - "nested dictionary blocks must not include nested id overhead"); - assertEquals( - new DictionaryBlock(block, allIds).getRegionSizeInBytes(0, firstHalfLength), - block.getRegionSizeInBytes(0, firstHalfLength), - "nested dictionary blocks must not include nested id overhead"); - assertEquals( - new DictionaryBlock(block, allIds).getRegionSizeInBytes(firstHalfLength, secondHalfLength), - block.getRegionSizeInBytes(firstHalfLength, secondHalfLength), - "nested dictionary blocks must not include nested id overhead"); - } - else { - assertEquals( - new DictionaryBlock(block, allIds).getPositionsSizeInBytes(selectedPositions, 1), - block.getPositionsSizeInBytes(selectedPositions, 1) + Integer.BYTES); - - assertEquals( - new DictionaryBlock(block, new int[]{0}).getSizeInBytes(), - block.getPositionsSizeInBytes(selectedPositions, 1) + Integer.BYTES); - - Arrays.fill(selectedPositions, true); - 
assertEquals( - new DictionaryBlock(block, allIds).getPositionsSizeInBytes(selectedPositions, positions), - block.getSizeInBytes() + (Integer.BYTES * (long) positions)); - - assertEquals( - new DictionaryBlock(block, firstHalfIds).getSizeInBytes(), - block.getRegionSizeInBytes(0, firstHalfLength) + (Integer.BYTES * (long) firstHalfLength)); - assertEquals( - new DictionaryBlock(block, secondHalfIds).getSizeInBytes(), - block.getRegionSizeInBytes(firstHalfLength, secondHalfLength) + (Integer.BYTES * (long) secondHalfLength)); - assertEquals( - new DictionaryBlock(block, allIds).getRegionSizeInBytes(0, firstHalfLength), - block.getRegionSizeInBytes(0, firstHalfLength) + (Integer.BYTES * (long) firstHalfLength)); - assertEquals( - new DictionaryBlock(block, allIds).getRegionSizeInBytes(firstHalfLength, secondHalfLength), - block.getRegionSizeInBytes(firstHalfLength, secondHalfLength) + (Integer.BYTES * (long) secondHalfLength)); - } - } + int firstHalfLength = positions / 2; + int secondHalfLength = positions - firstHalfLength; + int[] firstHalfIds = IntStream.range(0, firstHalfLength).toArray(); + int[] secondHalfIds = IntStream.range(firstHalfLength, positions).toArray(); + + boolean[] selectedPositions = new boolean[positions]; + selectedPositions[0] = true; + assertEquals( + DictionaryBlock.create(allIds.length, block, allIds).getPositionsSizeInBytes(selectedPositions, 1), + block.getPositionsSizeInBytes(selectedPositions, 1) + Integer.BYTES); + + Arrays.fill(selectedPositions, true); + assertEquals( + DictionaryBlock.create(allIds.length, block, allIds).getPositionsSizeInBytes(selectedPositions, positions), + block.getSizeInBytes() + (Integer.BYTES * (long) positions)); + + assertEquals( + DictionaryBlock.create(firstHalfIds.length, block, firstHalfIds).getSizeInBytes(), + block.getRegionSizeInBytes(0, firstHalfLength) + (Integer.BYTES * (long) firstHalfLength)); + assertEquals( + DictionaryBlock.create(secondHalfIds.length, block, 
secondHalfIds).getSizeInBytes(), + block.getRegionSizeInBytes(firstHalfLength, secondHalfLength) + (Integer.BYTES * (long) secondHalfLength)); + assertEquals( + DictionaryBlock.create(allIds.length, block, allIds).getRegionSizeInBytes(0, firstHalfLength), + block.getRegionSizeInBytes(0, firstHalfLength) + (Integer.BYTES * (long) firstHalfLength)); + assertEquals( + DictionaryBlock.create(allIds.length, block, allIds).getRegionSizeInBytes(firstHalfLength, secondHalfLength), + block.getRegionSizeInBytes(firstHalfLength, secondHalfLength) + (Integer.BYTES * (long) secondHalfLength)); } private static DictionaryBlock createDictionaryBlockWithUnreferencedKeys(Slice[] expectedValues, int positionCount) { + checkArgument(positionCount >= 2, "positionCount must be at least 2 for a dictionary block"); + // adds references to 0 and all odd indexes int dictionarySize = expectedValues.length; int[] ids = new int[positionCount]; @@ -518,18 +391,19 @@ private static DictionaryBlock createDictionaryBlockWithUnreferencedKeys(Slice[] } ids[i] = index; } - return new DictionaryBlock(createSlicesBlock(expectedValues), ids); + return (DictionaryBlock) DictionaryBlock.create(ids.length, createSlicesBlock(expectedValues), ids); } private static DictionaryBlock createDictionaryBlock(Slice[] expectedValues, int positionCount) { + checkArgument(positionCount >= 2, "positionCount must be at least 2 for a dictionary block"); int dictionarySize = expectedValues.length; int[] ids = new int[positionCount]; for (int i = 0; i < positionCount; i++) { ids[i] = i % dictionarySize; } - return new DictionaryBlock(createSlicesBlock(expectedValues), ids); + return (DictionaryBlock) DictionaryBlock.create(ids.length, createSlicesBlock(expectedValues), ids); } private static BlockBuilder createBlockBuilder() diff --git a/core/trino-main/src/test/java/io/trino/block/TestRunLengthEncodedBlock.java b/core/trino-main/src/test/java/io/trino/block/TestRunLengthEncodedBlock.java index 800a4ebefba4..70ac0cef9341 
100644 --- a/core/trino-main/src/test/java/io/trino/block/TestRunLengthEncodedBlock.java +++ b/core/trino-main/src/test/java/io/trino/block/TestRunLengthEncodedBlock.java @@ -42,7 +42,7 @@ public void test() private void assertRleBlock(int positionCount) { Slice expectedValue = createExpectedValue(0); - Block block = new RunLengthEncodedBlock(createSingleValueBlock(expectedValue), positionCount); + Block block = RunLengthEncodedBlock.create(createSingleValueBlock(expectedValue), positionCount); Slice[] expectedValues = new Slice[positionCount]; for (int position = 0; position < positionCount; position++) { expectedValues[position] = expectedValue; @@ -66,7 +66,7 @@ private static BlockBuilder createBlockBuilder() public void testPositionsSizeInBytes() { Block valueBlock = createSingleValueBlock(createExpectedValue(10)); - Block rleBlock = new RunLengthEncodedBlock(valueBlock, 10); + Block rleBlock = RunLengthEncodedBlock.create(valueBlock, 10); // Size in bytes is not fixed per position assertTrue(rleBlock.fixedSizeInBytesPerPosition().isEmpty()); // Accepts specific position selection @@ -119,7 +119,7 @@ public void testEstimatedDataSizeForStats() { int positionCount = 10; Slice expectedValue = createExpectedValue(5); - Block block = new RunLengthEncodedBlock(createSingleValueBlock(expectedValue), positionCount); + Block block = RunLengthEncodedBlock.create(createSingleValueBlock(expectedValue), positionCount); for (int postition = 0; postition < positionCount; postition++) { assertEquals(block.getEstimatedDataSizeForStats(postition), expectedValue.length()); } diff --git a/core/trino-main/src/test/java/io/trino/execution/TestPageSplitterUtil.java b/core/trino-main/src/test/java/io/trino/execution/TestPageSplitterUtil.java index 32a23be1860d..44c27ab2ce45 100644 --- a/core/trino-main/src/test/java/io/trino/execution/TestPageSplitterUtil.java +++ b/core/trino-main/src/test/java/io/trino/execution/TestPageSplitterUtil.java @@ -83,7 +83,7 @@ public void 
testSplitPageNonDecreasingPageSize() Slice expectedValue = wrappedBuffer("test".getBytes(UTF_8)); BlockBuilder blockBuilder = VARCHAR.createBlockBuilder(null, 1, expectedValue.length()); blockBuilder.writeBytes(expectedValue, 0, expectedValue.length()).closeEntry(); - Block rleBlock = new RunLengthEncodedBlock(blockBuilder.build(), positionCount); + Block rleBlock = RunLengthEncodedBlock.create(blockBuilder.build(), positionCount); Page initialPage = new Page(rleBlock); List pages = splitPage(initialPage, maxPageSizeInBytes); diff --git a/core/trino-main/src/test/java/io/trino/execution/buffer/TestPagesSerde.java b/core/trino-main/src/test/java/io/trino/execution/buffer/TestPagesSerde.java index d118ea58f798..2d4a8c09ab87 100644 --- a/core/trino-main/src/test/java/io/trino/execution/buffer/TestPagesSerde.java +++ b/core/trino-main/src/test/java/io/trino/execution/buffer/TestPagesSerde.java @@ -68,7 +68,7 @@ public void testBigintSerializedSize() // empty page Page page = new Page(builder.build()); int pageSize = serializedSize(ImmutableList.of(BIGINT), page); - assertEquals(pageSize, 52); // page overhead ideally 35 but since a 0 sized block will be a RLEBlock we have an overhead of 17 + assertEquals(pageSize, 40); // page with one value BIGINT.writeLong(builder, 123); @@ -92,7 +92,7 @@ public void testVarcharSerializedSize() // empty page Page page = new Page(builder.build()); int pageSize = serializedSize(ImmutableList.of(VARCHAR), page); - assertEquals(pageSize, 60); // page overhead ideally 44 but since a 0 sized block will be a RLEBlock we have an overhead of 16 + assertEquals(pageSize, 44); // page with one value VARCHAR.writeString(builder, "alice"); diff --git a/core/trino-main/src/test/java/io/trino/operator/BenchmarkGroupByHash.java b/core/trino-main/src/test/java/io/trino/operator/BenchmarkGroupByHash.java index a1b233ae9fa2..d5d261a4281a 100644 --- a/core/trino-main/src/test/java/io/trino/operator/BenchmarkGroupByHash.java +++ 
b/core/trino-main/src/test/java/io/trino/operator/BenchmarkGroupByHash.java @@ -255,7 +255,7 @@ else if (pageCount % 3 == 1) { // rle page Block[] blocks = new Block[page.getChannelCount()]; for (int channel = 0; channel < blocks.length; ++channel) { - blocks[channel] = new RunLengthEncodedBlock(page.getBlock(channel).getSingleValueBlock(0), page.getPositionCount()); + blocks[channel] = RunLengthEncodedBlock.create(page.getBlock(channel).getSingleValueBlock(0), page.getPositionCount()); } pages.add(new Page(blocks)); } @@ -264,7 +264,7 @@ else if (pageCount % 3 == 1) { int[] positions = IntStream.range(0, page.getPositionCount()).toArray(); Block[] blocks = new Block[page.getChannelCount()]; for (int channel = 0; channel < page.getChannelCount(); ++channel) { - blocks[channel] = new DictionaryBlock(page.getBlock(channel), positions); + blocks[channel] = DictionaryBlock.create(positions.length, page.getBlock(channel), positions); } pages.add(new Page(blocks)); } diff --git a/core/trino-main/src/test/java/io/trino/operator/BenchmarkGroupByHashOnSimulatedData.java b/core/trino-main/src/test/java/io/trino/operator/BenchmarkGroupByHashOnSimulatedData.java index a67c18e8e9fd..c69f64cb96bc 100644 --- a/core/trino-main/src/test/java/io/trino/operator/BenchmarkGroupByHashOnSimulatedData.java +++ b/core/trino-main/src/test/java/io/trino/operator/BenchmarkGroupByHashOnSimulatedData.java @@ -556,7 +556,7 @@ private void createDictionaryBlock(int blockCount, int positionsPerBlock, int ch } } - blocks[i] = new DictionaryBlock(dictionary, indexes); + blocks[i] = DictionaryBlock.create(indexes.length, dictionary, indexes); } } diff --git a/core/trino-main/src/test/java/io/trino/operator/TestAggregationOperator.java b/core/trino-main/src/test/java/io/trino/operator/TestAggregationOperator.java index 3cf73b2302d0..19f90fb2c0ec 100644 --- a/core/trino-main/src/test/java/io/trino/operator/TestAggregationOperator.java +++ 
b/core/trino-main/src/test/java/io/trino/operator/TestAggregationOperator.java @@ -132,7 +132,7 @@ public void testDistinctMaskWithNulls() Optional.of(new boolean[] {true, true, true, true}), /* all positions are null */ new byte[] {1, 1, 1, 1}); /* non-zero value is true, all masks are true */ - Block trueNullRleMask = new RunLengthEncodedBlock(trueMaskAllNull.getSingleValueBlock(0), 4); + Block trueNullRleMask = RunLengthEncodedBlock.create(trueMaskAllNull.getSingleValueBlock(0), 4); List nullTrueMaskInput = ImmutableList.of( new Page(4, createLongsBlock(1, 2, 3, 4), trueMaskAllNull), diff --git a/core/trino-main/src/test/java/io/trino/operator/TestGroupByHash.java b/core/trino-main/src/test/java/io/trino/operator/TestGroupByHash.java index f9c5af9c9597..c3d25005f3bb 100644 --- a/core/trino-main/src/test/java/io/trino/operator/TestGroupByHash.java +++ b/core/trino-main/src/test/java/io/trino/operator/TestGroupByHash.java @@ -22,7 +22,6 @@ import io.trino.spi.PageBuilder; import io.trino.spi.block.Block; import io.trino.spi.block.DictionaryBlock; -import io.trino.spi.block.DictionaryId; import io.trino.spi.block.LongArrayBlock; import io.trino.spi.block.RunLengthEncodedBlock; import io.trino.spi.block.VariableWidthBlock; @@ -49,7 +48,6 @@ import static io.trino.block.BlockAssertions.createStringSequenceBlock; import static io.trino.operator.GroupByHash.createGroupByHash; import static io.trino.operator.UpdateMemory.NOOP; -import static io.trino.spi.block.DictionaryId.randomDictionaryId; import static io.trino.spi.type.BigintType.BIGINT; import static io.trino.spi.type.DoubleType.DOUBLE; import static io.trino.spi.type.VarcharType.VARCHAR; @@ -146,8 +144,8 @@ public void testRunLengthEncodedInputPage(GroupByHashType groupByHashType) Block block = BlockAssertions.createLongsBlock(0L); Block hashBlock = TypeTestUtils.getHashBlock(ImmutableList.of(BIGINT), block); Page page = new Page( - new RunLengthEncodedBlock(block, 2), - new RunLengthEncodedBlock(hashBlock, 2)); 
+ RunLengthEncodedBlock.create(block, 2), + RunLengthEncodedBlock.create(hashBlock, 2)); groupByHash.addPage(page).process(); @@ -175,8 +173,8 @@ public void testDictionaryInputPage(GroupByHashType groupByHashType) Block hashBlock = TypeTestUtils.getHashBlock(ImmutableList.of(BIGINT), block); int[] ids = new int[] {0, 0, 1, 1}; Page page = new Page( - new DictionaryBlock(block, ids), - new DictionaryBlock(hashBlock, ids)); + DictionaryBlock.create(ids.length, block, ids), + DictionaryBlock.create(ids.length, hashBlock, ids)); groupByHash.addPage(page).process(); @@ -475,9 +473,8 @@ public void testMemoryReservationYieldWithDictionary(GroupByHashType groupByHash int dictionaryLength = 1_000; int length = 2_000_000; int[] ids = IntStream.range(0, dictionaryLength).toArray(); - DictionaryId dictionaryId = randomDictionaryId(); - Block valuesBlock = new DictionaryBlock(dictionaryLength, createLongSequenceBlock(0, length), ids, dictionaryId); - Block hashBlock = new DictionaryBlock(dictionaryLength, getHashBlock(ImmutableList.of(BIGINT), valuesBlock), ids, dictionaryId); + Block valuesBlock = DictionaryBlock.create(dictionaryLength, createLongSequenceBlock(0, length), ids); + Block hashBlock = DictionaryBlock.create(dictionaryLength, getHashBlock(ImmutableList.of(BIGINT), valuesBlock), ids); Page page = new Page(valuesBlock, hashBlock); AtomicInteger currentQuota = new AtomicInteger(0); AtomicInteger allowedQuota = new AtomicInteger(3); @@ -637,8 +634,8 @@ public void testLowCardinalityDictionariesProperGroupIdOrder() for (int i = 0; i < 16; i++) { ids[i] = 1; } - Block block1 = new DictionaryBlock(dictionary, ids); - Block block2 = new DictionaryBlock(dictionary, ids); + Block block1 = DictionaryBlock.create(ids.length, dictionary, ids); + Block block2 = DictionaryBlock.create(ids.length, dictionary, ids); Page page = new Page(block1, block2); @@ -661,10 +658,10 @@ public void testProperWorkTypesSelected() { Block bigintBlock = BlockAssertions.createLongsBlock(1, 2, 3, 
4, 5, 6, 7, 8); Block bigintDictionaryBlock = BlockAssertions.createLongDictionaryBlock(0, 8); - Block bigintRleBlock = BlockAssertions.createRLEBlock(42, 8); + Block bigintRleBlock = BlockAssertions.createRepeatedValuesBlock(42, 8); Block varcharBlock = BlockAssertions.createStringsBlock("1", "2", "3", "4", "5", "6", "7", "8"); Block varcharDictionaryBlock = BlockAssertions.createStringDictionaryBlock(1, 8); - Block varcharRleBlock = new RunLengthEncodedBlock(new VariableWidthBlock(1, Slices.EMPTY_SLICE, new int[] {0, 1}, Optional.empty()), 8); + Block varcharRleBlock = RunLengthEncodedBlock.create(new VariableWidthBlock(1, Slices.EMPTY_SLICE, new int[] {0, 1}, Optional.empty()), 8); Block bigintBigDictionaryBlock = BlockAssertions.createLongDictionaryBlock(1, 8, 1000); Block bigintSingletonDictionaryBlock = BlockAssertions.createLongDictionaryBlock(1, 500000, 1); Block bigintHugeDictionaryBlock = BlockAssertions.createLongDictionaryBlock(1, 500000, 66000); // Above Short.MAX_VALUE diff --git a/core/trino-main/src/test/java/io/trino/operator/TestHashAggregationOperator.java b/core/trino-main/src/test/java/io/trino/operator/TestHashAggregationOperator.java index 8c3fa8246c35..503dc71662c5 100644 --- a/core/trino-main/src/test/java/io/trino/operator/TestHashAggregationOperator.java +++ b/core/trino-main/src/test/java/io/trino/operator/TestHashAggregationOperator.java @@ -69,7 +69,7 @@ import static io.trino.RowPagesBuilder.rowPagesBuilder; import static io.trino.SessionTestUtils.TEST_SESSION; import static io.trino.block.BlockAssertions.createLongsBlock; -import static io.trino.block.BlockAssertions.createRLEBlock; +import static io.trino.block.BlockAssertions.createRepeatedValuesBlock; import static io.trino.operator.GroupByHashYieldAssertion.GroupByHashYieldResult; import static io.trino.operator.GroupByHashYieldAssertion.createPagesWithDistinctHashKeys; import static io.trino.operator.GroupByHashYieldAssertion.finishOperatorWithYieldingGroupByHash; @@ -744,11 
+744,11 @@ public void testAdaptivePartialAggregation() // First operator will trigger adaptive partial aggregation after the first page List operator1Input = rowPagesBuilder(false, hashChannels, BIGINT) .addBlocksPage(createLongsBlock(0, 1, 2, 3, 4, 5, 6, 7, 8, 8)) // first page will be hashed but the values are almost unique, so it will trigger adaptation - .addBlocksPage(createRLEBlock(1, 10)) // second page would be hashed to existing value 1. but if adaptive PA kicks in, the raw values will be passed on + .addBlocksPage(createRepeatedValuesBlock(1, 10)) // second page would be hashed to existing value 1. but if adaptive PA kicks in, the raw values will be passed on .build(); List operator1Expected = rowPagesBuilder(BIGINT, BIGINT) .addBlocksPage(createLongsBlock(0, 1, 2, 3, 4, 5, 6, 7, 8), createLongsBlock(0, 1, 2, 3, 4, 5, 6, 7, 8)) // the last position was aggregated - .addBlocksPage(createRLEBlock(1, 10), createRLEBlock(1, 10)) // we are expecting second page with raw values + .addBlocksPage(createRepeatedValuesBlock(1, 10), createRepeatedValuesBlock(1, 10)) // we are expecting second page with raw values .build(); assertOperatorEquals(operatorFactory, operator1Input, operator1Expected); @@ -756,12 +756,12 @@ public void testAdaptivePartialAggregation() assertTrue(partialAggregationController.isPartialAggregationDisabled()); // second operator using the same factory, reuses PartialAggregationControl, so it will only produce raw pages (partial aggregation is disabled at this point) List operator2Input = rowPagesBuilder(false, hashChannels, BIGINT) - .addBlocksPage(createRLEBlock(1, 10)) - .addBlocksPage(createRLEBlock(2, 10)) + .addBlocksPage(createRepeatedValuesBlock(1, 10)) + .addBlocksPage(createRepeatedValuesBlock(2, 10)) .build(); List operator2Expected = rowPagesBuilder(BIGINT, BIGINT) - .addBlocksPage(createRLEBlock(1, 10), createRLEBlock(1, 10)) - .addBlocksPage(createRLEBlock(2, 10), createRLEBlock(2, 10)) + 
.addBlocksPage(createRepeatedValuesBlock(1, 10), createRepeatedValuesBlock(1, 10)) + .addBlocksPage(createRepeatedValuesBlock(2, 10), createRepeatedValuesBlock(2, 10)) .build(); assertOperatorEquals(operatorFactory, operator2Input, operator2Expected); @@ -792,7 +792,7 @@ public void testAdaptivePartialAggregationTriggeredOnlyOnFlush() List operator1Input = rowPagesBuilder(false, hashChannels, BIGINT) .addSequencePage(10, 0) // first page are unique values, so it would trigger adaptation, but it won't because flush is not called - .addBlocksPage(createRLEBlock(1, 2)) // second page will be hashed to existing value 1 + .addBlocksPage(createRepeatedValuesBlock(1, 2)) // second page will be hashed to existing value 1 .build(); // the total unique ows ratio for the first operator will be 10/12 so > 0.8 (adaptive partial aggregation uniqueRowsRatioThreshold) List operator1Expected = rowPagesBuilder(BIGINT, BIGINT) @@ -805,12 +805,12 @@ public void testAdaptivePartialAggregationTriggeredOnlyOnFlush() // second operator using the same factory, reuses PartialAggregationControl, so it will only produce raw pages (partial aggregation is disabled at this point) List operator2Input = rowPagesBuilder(false, hashChannels, BIGINT) - .addBlocksPage(createRLEBlock(1, 10)) - .addBlocksPage(createRLEBlock(2, 10)) + .addBlocksPage(createRepeatedValuesBlock(1, 10)) + .addBlocksPage(createRepeatedValuesBlock(2, 10)) .build(); List operator2Expected = rowPagesBuilder(BIGINT, BIGINT) - .addBlocksPage(createRLEBlock(1, 10), createRLEBlock(1, 10)) - .addBlocksPage(createRLEBlock(2, 10), createRLEBlock(2, 10)) + .addBlocksPage(createRepeatedValuesBlock(1, 10), createRepeatedValuesBlock(1, 10)) + .addBlocksPage(createRepeatedValuesBlock(2, 10), createRepeatedValuesBlock(2, 10)) .build(); assertOperatorEquals(operatorFactory, operator2Input, operator2Expected); diff --git a/core/trino-main/src/test/java/io/trino/operator/TestPageUtils.java 
b/core/trino-main/src/test/java/io/trino/operator/TestPageUtils.java index 8ce5df856306..698b7aa0c18c 100644 --- a/core/trino-main/src/test/java/io/trino/operator/TestPageUtils.java +++ b/core/trino-main/src/test/java/io/trino/operator/TestPageUtils.java @@ -50,8 +50,8 @@ public void testRecordMaterializedBytes() public void testNestedBlocks() { Block elements = lazyWrapper(createIntsBlock(1, 2, 3)); - DictionaryBlock dictBlock = new DictionaryBlock(elements, new int[] {0}); - Page page = new Page(1, dictBlock); + Block dictBlock = DictionaryBlock.create(2, elements, new int[] {0, 0}); + Page page = new Page(2, dictBlock); AtomicLong sizeInBytes = new AtomicLong(); recordMaterializedBytes(page, sizeInBytes::getAndAdd); diff --git a/core/trino-main/src/test/java/io/trino/operator/aggregation/AggregationTestUtils.java b/core/trino-main/src/test/java/io/trino/operator/aggregation/AggregationTestUtils.java index 92bcfb2aeed8..4a903227c035 100644 --- a/core/trino-main/src/test/java/io/trino/operator/aggregation/AggregationTestUtils.java +++ b/core/trino-main/src/test/java/io/trino/operator/aggregation/AggregationTestUtils.java @@ -175,7 +175,7 @@ private static Page[] maskPagesWithRle(boolean maskValue, Page... 
pages) Page[] maskedPages = new Page[pages.length]; for (int i = 0; i < pages.length; i++) { Page page = pages[i]; - maskedPages[i] = page.appendColumn(new RunLengthEncodedBlock(BooleanType.createBlockForSingleNonNullValue(maskValue), page.getPositionCount())); + maskedPages[i] = page.appendColumn(RunLengthEncodedBlock.create(BooleanType.createBlockForSingleNonNullValue(maskValue), page.getPositionCount())); } return maskedPages; } @@ -416,7 +416,7 @@ public static Page[] offsetColumns(Page[] pages, int offset) Page page = pages[i]; Block[] newBlocks = new Block[page.getChannelCount() + offset]; for (int channel = 0; channel < offset; channel++) { - newBlocks[channel] = createNullRLEBlock(page.getPositionCount()); + newBlocks[channel] = createAllNullBlock(page.getPositionCount()); } for (int channel = 0; channel < page.getChannelCount(); channel++) { newBlocks[channel + offset] = page.getBlock(channel); @@ -426,9 +426,9 @@ public static Page[] offsetColumns(Page[] pages, int offset) return newPages; } - private static RunLengthEncodedBlock createNullRLEBlock(int positionCount) + private static Block createAllNullBlock(int positionCount) { - return (RunLengthEncodedBlock) RunLengthEncodedBlock.create(BOOLEAN, null, positionCount); + return RunLengthEncodedBlock.create(BOOLEAN, null, positionCount); } public static Object getGroupValue(Type finalType, GroupedAggregator groupedAggregator, int groupId) diff --git a/core/trino-main/src/test/java/io/trino/operator/aggregation/TestApproximatePercentileAggregation.java b/core/trino-main/src/test/java/io/trino/operator/aggregation/TestApproximatePercentileAggregation.java index 0fa73ee8eb68..ba207ee9892f 100644 --- a/core/trino-main/src/test/java/io/trino/operator/aggregation/TestApproximatePercentileAggregation.java +++ b/core/trino-main/src/test/java/io/trino/operator/aggregation/TestApproximatePercentileAggregation.java @@ -15,6 +15,7 @@ import com.google.common.collect.ImmutableList; import 
io.trino.metadata.TestingFunctionResolution; +import io.trino.spi.block.Block; import io.trino.spi.block.BlockBuilder; import io.trino.spi.block.RunLengthEncodedBlock; import io.trino.spi.type.ArrayType; @@ -72,7 +73,7 @@ public void testLongPartialStep() LONG_APPROXIMATE_PERCENTILE, null, createLongsBlock(null, null), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -80,7 +81,7 @@ public void testLongPartialStep() LONG_APPROXIMATE_PERCENTILE, 1L, createLongsBlock(null, 1L), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -88,7 +89,7 @@ public void testLongPartialStep() LONG_APPROXIMATE_PERCENTILE, 2L, createLongsBlock(null, 1L, 2L, 3L), - createRLEBlock(0.5, 4)); + createRleBlock(0.5, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -96,7 +97,7 @@ public void testLongPartialStep() LONG_APPROXIMATE_PERCENTILE, 2L, createLongsBlock(1L, 2L, 3L), - createRLEBlock(0.5, 3)); + createRleBlock(0.5, 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -104,7 +105,7 @@ public void testLongPartialStep() LONG_APPROXIMATE_PERCENTILE, 3L, createLongsBlock(1L, null, 2L, 2L, null, 2L, 2L, null, 2L, 2L, null, 3L, 3L, null, 3L, null, 3L, 4L, 5L, 6L, 7L), - createRLEBlock(0.5, 21)); + createRleBlock(0.5, 21)); // array of approx_percentile assertAggregation( @@ -113,7 +114,7 @@ public void testLongPartialStep() LONG_APPROXIMATE_PERCENTILE_ARRAY, null, createLongsBlock(null, null), - createRLEBlock(ImmutableList.of(0.5), 2)); + createRleBlock(ImmutableList.of(0.5), 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -121,7 +122,7 @@ public void testLongPartialStep() LONG_APPROXIMATE_PERCENTILE_ARRAY, null, createLongsBlock(null, null), - createRLEBlock(ImmutableList.of(0.5, 0.99), 2)); + createRleBlock(ImmutableList.of(0.5, 0.99), 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -129,7 +130,7 @@ public void testLongPartialStep() LONG_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(1L, 1L), 
createLongsBlock(null, 1L), - createRLEBlock(ImmutableList.of(0.5, 0.5), 2)); + createRleBlock(ImmutableList.of(0.5, 0.5), 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -137,7 +138,7 @@ public void testLongPartialStep() LONG_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(1L, 2L, 3L), createLongsBlock(null, 1L, 2L, 3L), - createRLEBlock(ImmutableList.of(0.2, 0.5, 0.8), 4)); + createRleBlock(ImmutableList.of(0.2, 0.5, 0.8), 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -145,7 +146,7 @@ public void testLongPartialStep() LONG_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(2L, 3L), createLongsBlock(1L, 2L, 3L), - createRLEBlock(ImmutableList.of(0.5, 0.99), 3)); + createRleBlock(ImmutableList.of(0.5, 0.99), 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -153,7 +154,7 @@ public void testLongPartialStep() LONG_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(1L, 3L), createLongsBlock(1L, null, 2L, 2L, null, 2L, 2L, null, 2L, 2L, null, 3L, 3L, null, 3L, null, 3L, 4L, 5L, 6L, 7L), - createRLEBlock(ImmutableList.of(0.01, 0.5), 21)); + createRleBlock(ImmutableList.of(0.01, 0.5), 21)); // unsorted percentiles assertAggregation( @@ -162,7 +163,7 @@ public void testLongPartialStep() LONG_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(3L, 1L, 2L), createLongsBlock(null, 1L, 2L, 3L), - createRLEBlock(ImmutableList.of(0.8, 0.2, 0.5), 4)); + createRleBlock(ImmutableList.of(0.8, 0.2, 0.5), 4)); // weighted approx_percentile assertAggregation( @@ -172,7 +173,7 @@ public void testLongPartialStep() null, createLongsBlock(null, null), createLongsBlock(1L, 1L), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -181,7 +182,7 @@ public void testLongPartialStep() 1L, createLongsBlock(null, 1L), createDoublesBlock(1.0, 1.0), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -190,7 +191,7 @@ public void testLongPartialStep() 2L, createLongsBlock(null, 1L, 2L, 3L), createDoublesBlock(1.0, 1.0, 1.0, 
1.0), - createRLEBlock(0.5, 4)); + createRleBlock(0.5, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -199,7 +200,7 @@ public void testLongPartialStep() 2L, createLongsBlock(1L, 2L, 3L), createDoublesBlock(1.0, 1.0, 1.0), - createRLEBlock(0.5, 3)); + createRleBlock(0.5, 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -208,7 +209,7 @@ public void testLongPartialStep() 2L, createLongsBlock(1L, 2L, 3L), createDoublesBlock(23.4, 23.4, 23.4), - createRLEBlock(0.5, 3)); + createRleBlock(0.5, 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -217,7 +218,7 @@ public void testLongPartialStep() 3L, createLongsBlock(1L, null, 2L, null, 2L, null, 2L, null, 3L, null, 3L, null, 3L, 4L, 5L, 6L, 7L), createDoublesBlock(1.0, 1.0, 2.0, 1.0, 2.0, 1.0, 2.0, 1.0, 2.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0), - createRLEBlock(0.5, 17)); + createRleBlock(0.5, 17)); assertAggregation( FUNCTION_RESOLUTION, @@ -226,7 +227,7 @@ public void testLongPartialStep() 3L, createLongsBlock(1L, null, 2L, null, 2L, null, 2L, null, 3L, null, 3L, null, 3L, 4L, 5L, 6L, 7L), createDoublesBlock(1.1, 1.1, 2.2, 1.1, 2.2, 1.1, 2.2, 1.1, 2.2, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1), - createRLEBlock(0.5, 17)); + createRleBlock(0.5, 17)); assertAggregation( FUNCTION_RESOLUTION, @@ -235,8 +236,8 @@ public void testLongPartialStep() 9900L, createLongSequenceBlock(0, 10000), createDoubleRepeatBlock(1.0, 10000), - createRLEBlock(0.99, 10000), - createRLEBlock(0.001, 10000)); + createRleBlock(0.99, 10000), + createRleBlock(0.001, 10000)); // weighted + array of approx_percentile assertAggregation( @@ -246,7 +247,7 @@ public void testLongPartialStep() ImmutableList.of(2L, 3L), createLongsBlock(1L, 2L, 3L), createDoublesBlock(4.0, 2.0, 1.0), - createRLEBlock(ImmutableList.of(0.5, 0.8), 3)); + createRleBlock(ImmutableList.of(0.5, 0.8), 3)); } @Test @@ -259,7 +260,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE, null, createBlockOfReals(null, null), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 
2)); assertAggregation( FUNCTION_RESOLUTION, @@ -267,7 +268,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE, 1.0f, createBlockOfReals(null, 1.0f), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -275,7 +276,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE, 2.0f, createBlockOfReals(null, 1.0f, 2.0f, 3.0f), - createRLEBlock(0.5, 4)); + createRleBlock(0.5, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -283,7 +284,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE, 1.0f, createBlockOfReals(-1.0f, 1.0f), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -291,7 +292,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE, -1.0f, createBlockOfReals(-2.0f, 3.0f, -1.0f), - createRLEBlock(0.5, 3)); + createRleBlock(0.5, 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -299,7 +300,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE, 2.0f, createBlockOfReals(1.0f, 2.0f, 3.0f), - createRLEBlock(0.5, 3)); + createRleBlock(0.5, 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -307,7 +308,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE, 3.0f, createBlockOfReals(1.0f, null, 2.0f, 2.0f, null, 2.0f, 2.0f, null, 2.0f, 2.0f, null, 3.0f, 3.0f, null, 3.0f, null, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f), - createRLEBlock(0.5, 21)); + createRleBlock(0.5, 21)); // array of approx_percentile assertAggregation( @@ -316,7 +317,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE_ARRAY, null, createBlockOfReals(null, null), - createRLEBlock(ImmutableList.of(0.5), 2)); + createRleBlock(ImmutableList.of(0.5), 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -324,7 +325,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE_ARRAY, null, createBlockOfReals(null, null), - createRLEBlock(ImmutableList.of(0.5, 0.5), 2)); + createRleBlock(ImmutableList.of(0.5, 0.5), 2)); 
assertAggregation( FUNCTION_RESOLUTION, @@ -332,7 +333,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(1.0f, 1.0f), createBlockOfReals(null, 1.0f), - createRLEBlock(ImmutableList.of(0.5, 0.5), 2)); + createRleBlock(ImmutableList.of(0.5, 0.5), 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -340,7 +341,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(1.0f, 2.0f, 3.0f), createBlockOfReals(null, 1.0f, 2.0f, 3.0f), - createRLEBlock(ImmutableList.of(0.2, 0.5, 0.8), 4)); + createRleBlock(ImmutableList.of(0.2, 0.5, 0.8), 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -348,7 +349,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(2.0f, 3.0f), createBlockOfReals(1.0f, 2.0f, 3.0f), - createRLEBlock(ImmutableList.of(0.5, 0.99), 3)); + createRleBlock(ImmutableList.of(0.5, 0.99), 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -356,7 +357,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(1.0f, 3.0f), createBlockOfReals(1.0f, null, 2.0f, 2.0f, null, 2.0f, 2.0f, null, 2.0f, 2.0f, null, 3.0f, 3.0f, null, 3.0f, null, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f), - createRLEBlock(ImmutableList.of(0.01, 0.5), 21)); + createRleBlock(ImmutableList.of(0.01, 0.5), 21)); // unsorted percentiles assertAggregation( @@ -365,7 +366,7 @@ public void testFloatPartialStep() FLOAT_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(3.0f, 1.0f, 2.0f), createBlockOfReals(null, 1.0f, 2.0f, 3.0f), - createRLEBlock(ImmutableList.of(0.8, 0.2, 0.5), 4)); + createRleBlock(ImmutableList.of(0.8, 0.2, 0.5), 4)); // weighted approx_percentile assertAggregation( @@ -375,7 +376,7 @@ public void testFloatPartialStep() null, createBlockOfReals(null, null), createLongsBlock(1L, 1L), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -384,7 +385,7 @@ public void testFloatPartialStep() 1.0f, createBlockOfReals(null, 
1.0f), createDoublesBlock(1.0, 1.0), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -393,7 +394,7 @@ public void testFloatPartialStep() 2.0f, createBlockOfReals(null, 1.0f, 2.0f, 3.0f), createDoublesBlock(1.0, 1.0, 1.0, 1.0), - createRLEBlock(0.5, 4)); + createRleBlock(0.5, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -402,7 +403,7 @@ public void testFloatPartialStep() 2.0f, createBlockOfReals(1.0f, 2.0f, 3.0f), createDoublesBlock(1.0, 1.0, 1.0), - createRLEBlock(0.5, 3)); + createRleBlock(0.5, 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -411,7 +412,7 @@ public void testFloatPartialStep() 2.75f, createBlockOfReals(1.0f, null, 2.0f, null, 2.0f, null, 2.0f, null, 3.0f, null, 3.0f, null, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f), createDoublesBlock(1.0, 1.0, 2.0, 1.0, 2.0, 1.0, 2.0, 1.0, 2.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0), - createRLEBlock(0.5, 17)); + createRleBlock(0.5, 17)); assertAggregation( FUNCTION_RESOLUTION, @@ -420,7 +421,7 @@ public void testFloatPartialStep() 2.75f, createBlockOfReals(1.0f, null, 2.0f, null, 2.0f, null, 2.0f, null, 3.0f, null, 3.0f, null, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f), createDoublesBlock(1.1, 1.1, 2.2, 1.1, 2.2, 1.1, 2.2, 1.1, 2.2, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1), - createRLEBlock(0.5, 17)); + createRleBlock(0.5, 17)); assertAggregation( FUNCTION_RESOLUTION, @@ -429,8 +430,8 @@ public void testFloatPartialStep() 9900.0f, createSequenceBlockOfReal(0, 10000), createDoubleRepeatBlock(1, 10000), - createRLEBlock(0.99, 10000), - createRLEBlock(0.001, 10000)); + createRleBlock(0.99, 10000), + createRleBlock(0.001, 10000)); // weighted + array of approx_percentile assertAggregation( @@ -440,7 +441,7 @@ public void testFloatPartialStep() ImmutableList.of(1.5f, 2.6f), createBlockOfReals(1.0f, 2.0f, 3.0f), createDoublesBlock(4.0, 2.0, 1.0), - createRLEBlock(ImmutableList.of(0.5, 0.8), 3)); + createRleBlock(ImmutableList.of(0.5, 0.8), 3)); } @Test @@ -453,7 +454,7 @@ public void 
testDoublePartialStep() DOUBLE_APPROXIMATE_PERCENTILE, null, createDoublesBlock(null, null), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -461,7 +462,7 @@ public void testDoublePartialStep() DOUBLE_APPROXIMATE_PERCENTILE, 1.0, createDoublesBlock(null, 1.0), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -469,7 +470,7 @@ public void testDoublePartialStep() DOUBLE_APPROXIMATE_PERCENTILE, 2.0, createDoublesBlock(null, 1.0, 2.0, 3.0), - createRLEBlock(0.5, 4)); + createRleBlock(0.5, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -477,7 +478,7 @@ public void testDoublePartialStep() DOUBLE_APPROXIMATE_PERCENTILE, 2.0, createDoublesBlock(1.0, 2.0, 3.0), - createRLEBlock(0.5, 3)); + createRleBlock(0.5, 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -485,7 +486,7 @@ public void testDoublePartialStep() DOUBLE_APPROXIMATE_PERCENTILE, 3.0, createDoublesBlock(1.0, null, 2.0, 2.0, null, 2.0, 2.0, null, 2.0, 2.0, null, 3.0, 3.0, null, 3.0, null, 3.0, 4.0, 5.0, 6.0, 7.0), - createRLEBlock(0.5, 21)); + createRleBlock(0.5, 21)); // array of approx_percentile assertAggregation( @@ -494,7 +495,7 @@ public void testDoublePartialStep() DOUBLE_APPROXIMATE_PERCENTILE_ARRAY, null, createDoublesBlock(null, null), - createRLEBlock(ImmutableList.of(0.5), 2)); + createRleBlock(ImmutableList.of(0.5), 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -502,7 +503,7 @@ public void testDoublePartialStep() DOUBLE_APPROXIMATE_PERCENTILE_ARRAY, null, createDoublesBlock(null, null), - createRLEBlock(ImmutableList.of(0.5, 0.5), 2)); + createRleBlock(ImmutableList.of(0.5, 0.5), 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -510,7 +511,7 @@ public void testDoublePartialStep() DOUBLE_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(1.0, 1.0), createDoublesBlock(null, 1.0), - createRLEBlock(ImmutableList.of(0.5, 0.5), 2)); + createRleBlock(ImmutableList.of(0.5, 0.5), 2)); assertAggregation( 
FUNCTION_RESOLUTION, @@ -518,7 +519,7 @@ public void testDoublePartialStep() DOUBLE_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(1.0, 2.0, 3.0), createDoublesBlock(null, 1.0, 2.0, 3.0), - createRLEBlock(ImmutableList.of(0.2, 0.5, 0.8), 4)); + createRleBlock(ImmutableList.of(0.2, 0.5, 0.8), 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -526,7 +527,7 @@ public void testDoublePartialStep() DOUBLE_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(2.0, 3.0), createDoublesBlock(1.0, 2.0, 3.0), - createRLEBlock(ImmutableList.of(0.5, 0.99), 3)); + createRleBlock(ImmutableList.of(0.5, 0.99), 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -534,7 +535,7 @@ public void testDoublePartialStep() DOUBLE_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(1.0, 3.0), createDoublesBlock(1.0, null, 2.0, 2.0, null, 2.0, 2.0, null, 2.0, 2.0, null, 3.0, 3.0, null, 3.0, null, 3.0, 4.0, 5.0, 6.0, 7.0), - createRLEBlock(ImmutableList.of(0.01, 0.5), 21)); + createRleBlock(ImmutableList.of(0.01, 0.5), 21)); // unsorted percentiles assertAggregation( @@ -543,7 +544,7 @@ public void testDoublePartialStep() DOUBLE_APPROXIMATE_PERCENTILE_ARRAY, ImmutableList.of(3.0, 1.0, 2.0), createDoublesBlock(null, 1.0, 2.0, 3.0), - createRLEBlock(ImmutableList.of(0.8, 0.2, 0.5), 4)); + createRleBlock(ImmutableList.of(0.8, 0.2, 0.5), 4)); // weighted approx_percentile assertAggregation( @@ -553,7 +554,7 @@ public void testDoublePartialStep() null, createDoublesBlock(null, null), createLongsBlock(1L, 1L), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -562,7 +563,7 @@ public void testDoublePartialStep() 1.0, createDoublesBlock(null, 1.0), createDoublesBlock(1.0, 1.0), - createRLEBlock(0.5, 2)); + createRleBlock(0.5, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -571,7 +572,7 @@ public void testDoublePartialStep() 2.0, createDoublesBlock(null, 1.0, 2.0, 3.0), createDoublesBlock(1.0, 1.0, 1.0, 1.0), - createRLEBlock(0.5, 4)); + createRleBlock(0.5, 4)); 
assertAggregation( FUNCTION_RESOLUTION, @@ -580,7 +581,7 @@ public void testDoublePartialStep() 2.0, createDoublesBlock(1.0, 2.0, 3.0), createDoublesBlock(1.0, 1.0, 1.0), - createRLEBlock(0.5, 3)); + createRleBlock(0.5, 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -589,7 +590,7 @@ public void testDoublePartialStep() 2.75, createDoublesBlock(1.0, null, 2.0, null, 2.0, null, 2.0, null, 3.0, null, 3.0, null, 3.0, 4.0, 5.0, 6.0, 7.0), createDoublesBlock(1.0, 1.0, 2.0, 1.0, 2.0, 1.0, 2.0, 1.0, 2.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0), - createRLEBlock(0.5, 17)); + createRleBlock(0.5, 17)); assertAggregation( FUNCTION_RESOLUTION, @@ -598,7 +599,7 @@ public void testDoublePartialStep() 2.75, createDoublesBlock(1.0, null, 2.0, null, 2.0, null, 2.0, null, 3.0, null, 3.0, null, 3.0, 4.0, 5.0, 6.0, 7.0), createDoublesBlock(1.1, 1.1, 2.2, 1.1, 2.2, 1.1, 2.2, 1.1, 2.2, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1), - createRLEBlock(0.5, 17)); + createRleBlock(0.5, 17)); assertAggregation( FUNCTION_RESOLUTION, @@ -607,8 +608,8 @@ public void testDoublePartialStep() 9900.0, createDoubleSequenceBlock(0, 10000), createDoubleRepeatBlock(1.0, 10000), - createRLEBlock(0.99, 10000), - createRLEBlock(0.001, 10000)); + createRleBlock(0.99, 10000), + createRleBlock(0.001, 10000)); // weighted + array of approx_percentile assertAggregation( @@ -618,17 +619,17 @@ public void testDoublePartialStep() ImmutableList.of(1.5, 2.6000000000000005), createDoublesBlock(1.0, 2.0, 3.0), createDoublesBlock(4.0, 2.0, 1.0), - createRLEBlock(ImmutableList.of(0.5, 0.8), 3)); + createRleBlock(ImmutableList.of(0.5, 0.8), 3)); } - private static RunLengthEncodedBlock createRLEBlock(double percentile, int positionCount) + private static Block createRleBlock(double percentile, int positionCount) { BlockBuilder blockBuilder = DOUBLE.createBlockBuilder(null, 1); DOUBLE.writeDouble(blockBuilder, percentile); - return new RunLengthEncodedBlock(blockBuilder.build(), positionCount); + return 
RunLengthEncodedBlock.create(blockBuilder.build(), positionCount); } - private static RunLengthEncodedBlock createRLEBlock(Iterable percentiles, int positionCount) + private static Block createRleBlock(Iterable percentiles, int positionCount) { BlockBuilder rleBlockBuilder = new ArrayType(DOUBLE).createBlockBuilder(null, 1); BlockBuilder arrayBlockBuilder = rleBlockBuilder.beginBlockEntry(); @@ -639,6 +640,6 @@ private static RunLengthEncodedBlock createRLEBlock(Iterable percentiles rleBlockBuilder.closeEntry(); - return new RunLengthEncodedBlock(rleBlockBuilder.build(), positionCount); + return RunLengthEncodedBlock.create(rleBlockBuilder.build(), positionCount); } } diff --git a/core/trino-main/src/test/java/io/trino/operator/aggregation/TestQuantileDigestAggregationFunction.java b/core/trino-main/src/test/java/io/trino/operator/aggregation/TestQuantileDigestAggregationFunction.java index a13f13c377fe..04bd6f07ed47 100644 --- a/core/trino-main/src/test/java/io/trino/operator/aggregation/TestQuantileDigestAggregationFunction.java +++ b/core/trino-main/src/test/java/io/trino/operator/aggregation/TestQuantileDigestAggregationFunction.java @@ -16,6 +16,7 @@ import com.google.common.base.Joiner; import com.google.common.primitives.Floats; import io.airlift.stats.QuantileDigest; +import io.trino.block.BlockAssertions; import io.trino.metadata.TestingFunctionResolution; import io.trino.operator.scalar.AbstractTestFunctions; import io.trino.spi.Page; @@ -39,7 +40,7 @@ import static io.trino.block.BlockAssertions.createDoublesBlock; import static io.trino.block.BlockAssertions.createLongSequenceBlock; import static io.trino.block.BlockAssertions.createLongsBlock; -import static io.trino.block.BlockAssertions.createRLEBlock; +import static io.trino.block.BlockAssertions.createRepeatedValuesBlock; import static io.trino.block.BlockAssertions.createSequenceBlockOfReal; import static io.trino.operator.aggregation.AggregationTestUtils.assertAggregation; import static 
io.trino.operator.aggregation.FloatingPointBitsConverterUtil.doubleToSortableLong; @@ -67,31 +68,31 @@ public void testDoublesWithWeights() { testAggregationDouble( createDoublesBlock(1.0, null, 2.0, null, 3.0, null, 4.0, null, 5.0, null), - createRLEBlock(1, 10), + createRepeatedValuesBlock(1, 10), 0.01, 1.0, 2.0, 3.0, 4.0, 5.0); testAggregationDouble( createDoublesBlock(null, null, null, null, null), - createRLEBlock(1, 5), + createRepeatedValuesBlock(1, 5), NaN); testAggregationDouble( createDoublesBlock(-1.0, -2.0, -3.0, -4.0, -5.0, -6.0, -7.0, -8.0, -9.0, -10.0), - createRLEBlock(1, 10), + createRepeatedValuesBlock(1, 10), 0.01, -1.0, -2.0, -3.0, -4.0, -5.0, -6.0, -7.0, -8.0, -9.0, -10.0); testAggregationDouble( createDoublesBlock(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0), - createRLEBlock(1, 10), + createRepeatedValuesBlock(1, 10), 0.01, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0); testAggregationDouble( createDoublesBlock(), - createRLEBlock(1, 0), + createRepeatedValuesBlock(1, 0), NaN); testAggregationDouble( createDoublesBlock(1.0), - createRLEBlock(1, 1), + createRepeatedValuesBlock(1, 1), 0.01, 1.0); testAggregationDouble( createDoubleSequenceBlock(-1000, 1000), - createRLEBlock(1, 2000), + createRepeatedValuesBlock(1, 2000), 0.01, LongStream.range(-1000, 1000).asDoubleStream().toArray()); } @@ -101,31 +102,31 @@ public void testRealsWithWeights() { testAggregationReal( createBlockOfReals(1.0F, null, 2.0F, null, 3.0F, null, 4.0F, null, 5.0F, null), - createRLEBlock(1, 10), + createRepeatedValuesBlock(1, 10), 0.01, 1.0F, 2.0F, 3.0F, 4.0F, 5.0F); testAggregationReal( createBlockOfReals(null, null, null, null, null), - createRLEBlock(1, 5), + createRepeatedValuesBlock(1, 5), NaN); testAggregationReal( createBlockOfReals(-1.0F, -2.0F, -3.0F, -4.0F, -5.0F, -6.0F, -7.0F, -8.0F, -9.0F, -10.0F), - createRLEBlock(1, 10), + createRepeatedValuesBlock(1, 10), 0.01, -1.0F, -2.0F, -3.0F, -4.0F, -5.0F, -6.0F, -7.0F, -8.0F, -9.0F, -10.0F); 
testAggregationReal( createBlockOfReals(1.0F, 2.0F, 3.0F, 4.0F, 5.0F, 6.0F, 7.0F, 8.0F, 9.0F, 10.0F), - createRLEBlock(1, 10), + createRepeatedValuesBlock(1, 10), 0.01, 1.0F, 2.0F, 3.0F, 4.0F, 5.0F, 6.0F, 7.0F, 8.0F, 9.0F, 10.0F); testAggregationReal( createBlockOfReals(), - createRLEBlock(1, 0), + createRepeatedValuesBlock(1, 0), NaN); testAggregationReal( createBlockOfReals(1.0F), - createRLEBlock(1, 1), + createRepeatedValuesBlock(1, 1), 0.01, 1.0F); testAggregationReal( createSequenceBlockOfReal(-1000, 1000), - createRLEBlock(1, 2000), + createRepeatedValuesBlock(1, 2000), 0.01, Floats.toArray(LongStream.range(-1000, 1000).mapToObj(Float::new).collect(toImmutableList()))); } @@ -135,31 +136,31 @@ public void testBigintsWithWeight() { testAggregationBigint( createLongsBlock(1L, null, 2L, null, 3L, null, 4L, null, 5L, null), - createRLEBlock(1, 10), + createRepeatedValuesBlock(1, 10), 0.01, 1, 2, 3, 4, 5); testAggregationBigint( createLongsBlock(null, null, null, null, null), - createRLEBlock(1, 5), + createRepeatedValuesBlock(1, 5), NaN); testAggregationBigint( createLongsBlock(-1, -2, -3, -4, -5, -6, -7, -8, -9, -10), - createRLEBlock(1, 10), + createRepeatedValuesBlock(1, 10), 0.01, -1, -2, -3, -4, -5, -6, -7, -8, -9, -10); testAggregationBigint( createLongsBlock(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), - createRLEBlock(1, 10), + createRepeatedValuesBlock(1, 10), 0.01, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10); testAggregationBigint( createLongsBlock(new int[] {}), - createRLEBlock(1, 0), + createRepeatedValuesBlock(1, 0), NaN); testAggregationBigint( createLongsBlock(1), - createRLEBlock(1, 1), + createRepeatedValuesBlock(1, 1), 0.01, 1); testAggregationBigint( createLongSequenceBlock(-1000, 1000), - createRLEBlock(1, 2000), + createRepeatedValuesBlock(1, 2000), 0.01, LongStream.range(-1000, 1000).toArray()); } @@ -182,7 +183,7 @@ private void testAggregationBigint(Block inputBlock, Block weightsBlock, double // Test with weights and accuracy testAggregationBigints( 
fromTypes(BIGINT, BIGINT, DOUBLE), - new Page(inputBlock, weightsBlock, createRLEBlock(maxError, inputBlock.getPositionCount())), + new Page(inputBlock, weightsBlock, BlockAssertions.createRepeatedValuesBlock(maxError, inputBlock.getPositionCount())), maxError, inputs); } @@ -204,7 +205,7 @@ private void testAggregationReal(Block longsBlock, Block weightsBlock, double ma // Test with weights and accuracy testAggregationReal( fromTypes(REAL, BIGINT, DOUBLE), - new Page(longsBlock, weightsBlock, createRLEBlock(maxError, longsBlock.getPositionCount())), + new Page(longsBlock, weightsBlock, BlockAssertions.createRepeatedValuesBlock(maxError, longsBlock.getPositionCount())), maxError, inputs); } @@ -226,7 +227,7 @@ private void testAggregationDouble(Block longsBlock, Block weightsBlock, double // Test with weights and accuracy testAggregationDoubles( fromTypes(DOUBLE, BIGINT, DOUBLE), - new Page(longsBlock, weightsBlock, createRLEBlock(maxError, longsBlock.getPositionCount())), + new Page(longsBlock, weightsBlock, BlockAssertions.createRepeatedValuesBlock(maxError, longsBlock.getPositionCount())), maxError, inputs); } diff --git a/core/trino-main/src/test/java/io/trino/operator/aggregation/TestTDigestAggregationFunction.java b/core/trino-main/src/test/java/io/trino/operator/aggregation/TestTDigestAggregationFunction.java index c8e633e65ab5..daeafe01f5f1 100644 --- a/core/trino-main/src/test/java/io/trino/operator/aggregation/TestTDigestAggregationFunction.java +++ b/core/trino-main/src/test/java/io/trino/operator/aggregation/TestTDigestAggregationFunction.java @@ -15,6 +15,7 @@ import com.google.common.collect.ImmutableList; import io.airlift.stats.TDigest; +import io.trino.block.BlockAssertions; import io.trino.metadata.TestingFunctionResolution; import io.trino.operator.scalar.AbstractTestFunctions; import io.trino.spi.Page; @@ -31,7 +32,6 @@ import static io.airlift.slice.Slices.wrappedBuffer; import static io.trino.block.BlockAssertions.createDoubleSequenceBlock; 
import static io.trino.block.BlockAssertions.createDoublesBlock; -import static io.trino.block.BlockAssertions.createRLEBlock; import static io.trino.operator.aggregation.AggregationTestUtils.assertAggregation; import static io.trino.operator.scalar.TDigestFunctions.DEFAULT_WEIGHT; import static io.trino.spi.type.DoubleType.DOUBLE; @@ -71,7 +71,7 @@ public void testTdigestAggregationFunction() 1.0, 2.0, 3.0, 4.0, 5.0); testAggregation( createDoublesBlock(null, null, null, null, null), - createRLEBlock(1.0, 5), + BlockAssertions.createRepeatedValuesBlock(1.0, 5), ImmutableList.of()); testAggregation( createDoublesBlock(-1.0, -2.0, -3.0, -4.0, -5.0, -6.0, -7.0, -8.0, -9.0, -10.0), @@ -85,11 +85,11 @@ public void testTdigestAggregationFunction() 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0); testAggregation( createDoublesBlock(), - createRLEBlock(1.0, 0), + BlockAssertions.createRepeatedValuesBlock(1.0, 0), ImmutableList.of()); testAggregation( createDoublesBlock(1.0), - createRLEBlock(1.1, 1), + BlockAssertions.createRepeatedValuesBlock(1.1, 1), ImmutableList.of(1.1), 1.0); diff --git a/core/trino-main/src/test/java/io/trino/operator/aggregation/minmaxbyn/TestMinMaxByNAggregation.java b/core/trino-main/src/test/java/io/trino/operator/aggregation/minmaxbyn/TestMinMaxByNAggregation.java index 561769c41642..94b1bb265ddd 100644 --- a/core/trino-main/src/test/java/io/trino/operator/aggregation/minmaxbyn/TestMinMaxByNAggregation.java +++ b/core/trino-main/src/test/java/io/trino/operator/aggregation/minmaxbyn/TestMinMaxByNAggregation.java @@ -30,7 +30,7 @@ import static io.trino.block.BlockAssertions.createBlockOfReals; import static io.trino.block.BlockAssertions.createDoublesBlock; import static io.trino.block.BlockAssertions.createLongsBlock; -import static io.trino.block.BlockAssertions.createRLEBlock; +import static io.trino.block.BlockAssertions.createRepeatedValuesBlock; import static io.trino.block.BlockAssertions.createStringsBlock; import static 
io.trino.operator.aggregation.AggregationTestUtils.assertAggregation; import static io.trino.operator.aggregation.AggregationTestUtils.groupedAggregation; @@ -56,7 +56,7 @@ public void testMaxDoubleDouble() Arrays.asList((Double) null), createDoublesBlock(1.0, null), createDoublesBlock(3.0, 5.0), - createRLEBlock(1L, 2)); + createRepeatedValuesBlock(1L, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -65,7 +65,7 @@ public void testMaxDoubleDouble() null, createDoublesBlock(null, null), createDoublesBlock(null, null), - createRLEBlock(1L, 2)); + createRepeatedValuesBlock(1L, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -74,7 +74,7 @@ public void testMaxDoubleDouble() Arrays.asList(1.0), createDoublesBlock(null, 1.0, null, null), createDoublesBlock(null, 0.0, null, null), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -83,7 +83,7 @@ public void testMaxDoubleDouble() Arrays.asList(1.0), createDoublesBlock(1.0), createDoublesBlock(0.0), - createRLEBlock(2L, 1)); + createRepeatedValuesBlock(2L, 1)); assertAggregation( FUNCTION_RESOLUTION, @@ -92,7 +92,7 @@ public void testMaxDoubleDouble() null, createDoublesBlock(), createDoublesBlock(), - createRLEBlock(2L, 0)); + createRepeatedValuesBlock(2L, 0)); assertAggregation( FUNCTION_RESOLUTION, @@ -101,7 +101,7 @@ public void testMaxDoubleDouble() ImmutableList.of(2.5), createDoublesBlock(2.5, 2.0, 5.0, 3.0), createDoublesBlock(4.0, 1.5, 2.0, 3.0), - createRLEBlock(1L, 4)); + createRepeatedValuesBlock(1L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -110,7 +110,7 @@ public void testMaxDoubleDouble() ImmutableList.of(2.5, 3.0), createDoublesBlock(2.5, 2.0, 5.0, 3.0), createDoublesBlock(4.0, 1.5, 2.0, 3.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); } @Test @@ -124,7 +124,7 @@ public void testMinDoubleDouble() Arrays.asList((Double) null), createDoublesBlock(1.0, null), createDoublesBlock(5.0, 3.0), - createRLEBlock(1L, 2)); + 
createRepeatedValuesBlock(1L, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -133,7 +133,7 @@ public void testMinDoubleDouble() null, createDoublesBlock(null, null), createDoublesBlock(null, null), - createRLEBlock(1L, 2)); + createRepeatedValuesBlock(1L, 2)); assertAggregation( FUNCTION_RESOLUTION, @@ -142,7 +142,7 @@ public void testMinDoubleDouble() ImmutableList.of(2.0), createDoublesBlock(2.5, 2.0, 5.0, 3.0), createDoublesBlock(4.0, 1.5, 2.0, 3.0), - createRLEBlock(1L, 4)); + createRepeatedValuesBlock(1L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -151,7 +151,7 @@ public void testMinDoubleDouble() ImmutableList.of(2.0, 5.0), createDoublesBlock(2.5, 2.0, 5.0, 3.0), createDoublesBlock(4.0, 1.5, 2.0, 3.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); } @Test @@ -165,7 +165,7 @@ public void testMinDoubleVarchar() ImmutableList.of("z", "a"), createStringsBlock("z", "a", "x", "b"), createDoublesBlock(1.0, 2.0, 2.0, 3.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -174,7 +174,7 @@ public void testMinDoubleVarchar() ImmutableList.of("a", "zz"), createStringsBlock("zz", "hi", "bb", "a"), createDoublesBlock(0.0, 1.0, 2.0, -1.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -183,7 +183,7 @@ public void testMinDoubleVarchar() ImmutableList.of("a", "zz"), createStringsBlock("zz", "hi", null, "a"), createDoublesBlock(0.0, 1.0, null, -1.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -192,7 +192,7 @@ public void testMinDoubleVarchar() ImmutableList.of("b", "c"), createStringsBlock("a", "b", "c", "d"), createDoublesBlock(Double.NaN, 2.0, 3.0, 4.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -201,7 +201,7 @@ public void testMinDoubleVarchar() ImmutableList.of("a", "c"), createStringsBlock("a", "b", "c", "d"), 
createDoublesBlock(1.0, Double.NaN, 3.0, 4.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -210,7 +210,7 @@ public void testMinDoubleVarchar() ImmutableList.of("a", "b"), createStringsBlock("a", "b", "c", "d"), createDoublesBlock(1.0, 2.0, Double.NaN, 4.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -219,7 +219,7 @@ public void testMinDoubleVarchar() ImmutableList.of("a", "b"), createStringsBlock("a", "b", "c", "d"), createDoublesBlock(1.0, 2.0, 3.0, Double.NaN), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -228,7 +228,7 @@ public void testMinDoubleVarchar() ImmutableList.of("a", "b"), createStringsBlock("a", "b"), createDoublesBlock(1.0, Double.NaN), - createRLEBlock(2L, 2)); + createRepeatedValuesBlock(2L, 2)); } @Test @@ -242,7 +242,7 @@ public void testMaxDoubleVarchar() ImmutableList.of("a", "z"), createStringsBlock("z", "a", null), createDoublesBlock(1.0, 2.0, null), - createRLEBlock(2L, 3)); + createRepeatedValuesBlock(2L, 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -251,7 +251,7 @@ public void testMaxDoubleVarchar() ImmutableList.of("bb", "hi"), createStringsBlock("zz", "hi", "bb", "a"), createDoublesBlock(0.0, 1.0, 2.0, -1.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -260,7 +260,7 @@ public void testMaxDoubleVarchar() ImmutableList.of("hi", "zz"), createStringsBlock("zz", "hi", null, "a"), createDoublesBlock(0.0, 1.0, null, -1.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -269,7 +269,7 @@ public void testMaxDoubleVarchar() ImmutableList.of("d", "c"), createStringsBlock("a", "b", "c", "d"), createDoublesBlock(Double.NaN, 2.0, 3.0, 4.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ 
-278,7 +278,7 @@ public void testMaxDoubleVarchar() ImmutableList.of("d", "c"), createStringsBlock("a", "b", "c", "d"), createDoublesBlock(1.0, Double.NaN, 3.0, 4.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -287,7 +287,7 @@ public void testMaxDoubleVarchar() ImmutableList.of("d", "b"), createStringsBlock("a", "b", "c", "d"), createDoublesBlock(1.0, 2.0, Double.NaN, 4.0), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -296,7 +296,7 @@ public void testMaxDoubleVarchar() ImmutableList.of("c", "b"), createStringsBlock("a", "b", "c", "d"), createDoublesBlock(1.0, 2.0, 3.0, Double.NaN), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -305,7 +305,7 @@ public void testMaxDoubleVarchar() ImmutableList.of("a", "b"), createStringsBlock("a", "b"), createDoublesBlock(1.0, Double.NaN), - createRLEBlock(2L, 2)); + createRepeatedValuesBlock(2L, 2)); } @Test @@ -319,7 +319,7 @@ public void testMinRealVarchar() ImmutableList.of("z", "a"), createStringsBlock("z", "a", "x", "b"), createBlockOfReals(1.0f, 2.0f, 2.0f, 3.0f), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -328,7 +328,7 @@ public void testMinRealVarchar() ImmutableList.of("a", "zz"), createStringsBlock("zz", "hi", "bb", "a"), createBlockOfReals(0.0f, 1.0f, 2.0f, -1.0f), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -337,7 +337,7 @@ public void testMinRealVarchar() ImmutableList.of("a", "zz"), createStringsBlock("zz", "hi", null, "a"), createBlockOfReals(0.0f, 1.0f, null, -1.0f), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -346,7 +346,7 @@ public void testMinRealVarchar() ImmutableList.of("b", "c"), createStringsBlock("a", "b", "c", "d"), 
createBlockOfReals(Float.NaN, 2.0f, 3.0f, 4.0f), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -355,7 +355,7 @@ public void testMinRealVarchar() ImmutableList.of("a", "c"), createStringsBlock("a", "b", "c", "d"), createBlockOfReals(1.0f, Float.NaN, 3.0f, 4.0f), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -364,7 +364,7 @@ public void testMinRealVarchar() ImmutableList.of("a", "b"), createStringsBlock("a", "b", "c", "d"), createBlockOfReals(1.0f, 2.0f, Float.NaN, 4.0f), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -373,7 +373,7 @@ public void testMinRealVarchar() ImmutableList.of("a", "b"), createStringsBlock("a", "b", "c", "d"), createBlockOfReals(1.0f, 2.0f, 3.0f, Float.NaN), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -382,7 +382,7 @@ public void testMinRealVarchar() ImmutableList.of("a", "b"), createStringsBlock("a", "b"), createBlockOfReals(1.0f, Float.NaN), - createRLEBlock(2L, 2)); + createRepeatedValuesBlock(2L, 2)); } @Test @@ -396,7 +396,7 @@ public void testMaxRealVarchar() ImmutableList.of("a", "z"), createStringsBlock("z", "a", null), createBlockOfReals(1.0f, 2.0f, null), - createRLEBlock(2L, 3)); + createRepeatedValuesBlock(2L, 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -405,7 +405,7 @@ public void testMaxRealVarchar() ImmutableList.of("bb", "hi"), createStringsBlock("zz", "hi", "bb", "a"), createBlockOfReals(0.0f, 1.0f, 2.0f, -1.0f), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -414,7 +414,7 @@ public void testMaxRealVarchar() ImmutableList.of("hi", "zz"), createStringsBlock("zz", "hi", null, "a"), createBlockOfReals(0.0f, 1.0f, null, -1.0f), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ 
-423,7 +423,7 @@ public void testMaxRealVarchar() ImmutableList.of("d", "c"), createStringsBlock("a", "b", "c", "d"), createBlockOfReals(Float.NaN, 2.0f, 3.0f, 4.0f), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -432,7 +432,7 @@ public void testMaxRealVarchar() ImmutableList.of("d", "c"), createStringsBlock("a", "b", "c", "d"), createBlockOfReals(1.0f, Float.NaN, 3.0f, 4.0f), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -441,7 +441,7 @@ public void testMaxRealVarchar() ImmutableList.of("d", "b"), createStringsBlock("a", "b", "c", "d"), createBlockOfReals(1.0f, 2.0f, Float.NaN, 4.0f), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -450,7 +450,7 @@ public void testMaxRealVarchar() ImmutableList.of("c", "b"), createStringsBlock("a", "b", "c", "d"), createBlockOfReals(1.0f, 2.0f, 3.0f, Float.NaN), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -459,7 +459,7 @@ public void testMaxRealVarchar() ImmutableList.of("a", "b"), createStringsBlock("a", "b"), createBlockOfReals(1.0f, Float.NaN), - createRLEBlock(2L, 2)); + createRepeatedValuesBlock(2L, 2)); } @Test @@ -473,7 +473,7 @@ public void testMinVarcharDouble() ImmutableList.of(2.0, 3.0), createDoublesBlock(1.0, 2.0, 2.0, 3.0), createStringsBlock("z", "a", "x", "b"), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -482,7 +482,7 @@ public void testMinVarcharDouble() ImmutableList.of(-1.0, 2.0), createDoublesBlock(0.0, 1.0, 2.0, -1.0), createStringsBlock("zz", "hi", "bb", "a"), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -491,7 +491,7 @@ public void testMinVarcharDouble() ImmutableList.of(-1.0, 1.0), createDoublesBlock(0.0, 1.0, null, -1.0), createStringsBlock("zz", 
"hi", null, "a"), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); } @Test @@ -505,7 +505,7 @@ public void testMaxVarcharDouble() ImmutableList.of(1.0, 2.0), createDoublesBlock(1.0, 2.0, null), createStringsBlock("z", "a", null), - createRLEBlock(2L, 3)); + createRepeatedValuesBlock(2L, 3)); assertAggregation( FUNCTION_RESOLUTION, @@ -514,7 +514,7 @@ public void testMaxVarcharDouble() ImmutableList.of(0.0, 1.0), createDoublesBlock(0.0, 1.0, 2.0, -1.0), createStringsBlock("zz", "hi", "bb", "a"), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); assertAggregation( FUNCTION_RESOLUTION, @@ -523,7 +523,7 @@ public void testMaxVarcharDouble() ImmutableList.of(0.0, 1.0), createDoublesBlock(0.0, 1.0, null, -1.0), createStringsBlock("zz", "hi", null, "a"), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); } @Test @@ -537,7 +537,7 @@ public void testMinVarcharArray() ImmutableList.of(ImmutableList.of(2L, 3L), ImmutableList.of(4L, 5L)), createArrayBigintBlock(ImmutableList.of(ImmutableList.of(1L, 2L), ImmutableList.of(2L, 3L), ImmutableList.of(3L, 4L), ImmutableList.of(4L, 5L))), createStringsBlock("z", "a", "x", "b"), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); } @Test @@ -551,7 +551,7 @@ public void testMaxVarcharArray() ImmutableList.of(ImmutableList.of(1L, 2L), ImmutableList.of(3L, 4L)), createArrayBigintBlock(ImmutableList.of(ImmutableList.of(1L, 2L), ImmutableList.of(2L, 3L), ImmutableList.of(3L, 4L), ImmutableList.of(4L, 5L))), createStringsBlock("z", "a", "x", "b"), - createRLEBlock(2L, 4)); + createRepeatedValuesBlock(2L, 4)); } @Test @@ -565,7 +565,7 @@ public void testMinArrayVarchar() ImmutableList.of("b", "x", "z"), createStringsBlock("z", "a", "x", "b"), createArrayBigintBlock(ImmutableList.of(ImmutableList.of(1L, 2L), ImmutableList.of(2L, 3L), ImmutableList.of(0L, 3L), ImmutableList.of(0L, 2L))), - createRLEBlock(3L, 4)); + createRepeatedValuesBlock(3L, 4)); } @Test @@ -579,7 +579,7 @@ public void 
testMaxArrayVarchar() ImmutableList.of("a", "z", "x"), createStringsBlock("z", "a", "x", "b"), createArrayBigintBlock(ImmutableList.of(ImmutableList.of(1L, 2L), ImmutableList.of(2L, 3L), ImmutableList.of(0L, 3L), ImmutableList.of(0L, 2L))), - createRLEBlock(3L, 4)); + createRepeatedValuesBlock(3L, 4)); } @Test diff --git a/core/trino-main/src/test/java/io/trino/operator/output/BenchmarkPartitionedOutputOperator.java b/core/trino-main/src/test/java/io/trino/operator/output/BenchmarkPartitionedOutputOperator.java index cf9db82fc12a..d5515fca29ab 100644 --- a/core/trino-main/src/test/java/io/trino/operator/output/BenchmarkPartitionedOutputOperator.java +++ b/core/trino-main/src/test/java/io/trino/operator/output/BenchmarkPartitionedOutputOperator.java @@ -86,9 +86,9 @@ import static io.trino.block.BlockAssertions.chooseNullPositions; import static io.trino.block.BlockAssertions.createLongDictionaryBlock; import static io.trino.block.BlockAssertions.createLongsBlock; -import static io.trino.block.BlockAssertions.createRLEBlock; import static io.trino.block.BlockAssertions.createRandomBlockForType; import static io.trino.block.BlockAssertions.createRandomLongsBlock; +import static io.trino.block.BlockAssertions.createRepeatedValuesBlock; import static io.trino.execution.buffer.OutputBuffers.BufferType.PARTITIONED; import static io.trino.execution.buffer.OutputBuffers.createInitialEmptyOutputBuffers; import static io.trino.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext; @@ -264,14 +264,14 @@ public enum TestType positionCount, types.size(), () -> createRandomBlockForType(BigintType.BIGINT, positionCount, nullRate), - createRLEBlock(42, positionCount)); + createRepeatedValuesBlock(42, positionCount)); }), BIGINT_PARTITION_CHANNEL_RLE_NULL(BigintType.BIGINT, 20, (types, positionCount, nullRate) -> { return page( positionCount, types.size(), () -> createRandomBlockForType(BigintType.BIGINT, positionCount, nullRate), - new 
RunLengthEncodedBlock(createLongsBlock((Long) null), positionCount)); + RunLengthEncodedBlock.create(createLongsBlock((Long) null), positionCount)); }), LONG_DECIMAL(createDecimalType(MAX_SHORT_PRECISION + 1), 5000), DICTIONARY_LONG_DECIMAL(createDecimalType(MAX_SHORT_PRECISION + 1), 5000, PageTestUtils::createRandomDictionaryPage), @@ -313,7 +313,7 @@ public enum TestType positionCount, Optional.ofNullable(isNull), new Block[] { - new RunLengthEncodedBlock(createLongsBlock(-65128734213L), notNullPositionsCount), + RunLengthEncodedBlock.create(createLongsBlock(-65128734213L), notNullPositionsCount), createRandomLongsBlock(notNullPositionsCount, nullRate)}); }) .collect(toImmutableList())); diff --git a/core/trino-main/src/test/java/io/trino/operator/output/TestPagePartitioner.java b/core/trino-main/src/test/java/io/trino/operator/output/TestPagePartitioner.java index 2a42e4ab3767..ce38c9097bd3 100644 --- a/core/trino-main/src/test/java/io/trino/operator/output/TestPagePartitioner.java +++ b/core/trino-main/src/test/java/io/trino/operator/output/TestPagePartitioner.java @@ -71,8 +71,8 @@ import static io.trino.block.BlockAssertions.createLongDictionaryBlock; import static io.trino.block.BlockAssertions.createLongSequenceBlock; import static io.trino.block.BlockAssertions.createLongsBlock; -import static io.trino.block.BlockAssertions.createRLEBlock; import static io.trino.block.BlockAssertions.createRandomBlockForType; +import static io.trino.block.BlockAssertions.createRepeatedValuesBlock; import static io.trino.spi.type.BigintType.BIGINT; import static io.trino.spi.type.BooleanType.BOOLEAN; import static io.trino.spi.type.CharType.createCharType; @@ -260,7 +260,7 @@ public void testOutputForSimplePageWithPartitionConstantAndHashBlock(Partitionin public void testPartitionPositionsWithRleNotNull(PartitioningMode partitioningMode) { PagePartitioner pagePartitioner = pagePartitioner(BIGINT, BIGINT).build(); - Page page = new Page(createRLEBlock(0, POSITIONS_PER_PAGE), 
createLongSequenceBlock(0, POSITIONS_PER_PAGE)); + Page page = new Page(createRepeatedValuesBlock(0, POSITIONS_PER_PAGE), createLongSequenceBlock(0, POSITIONS_PER_PAGE)); processPages(pagePartitioner, partitioningMode, page); @@ -275,7 +275,7 @@ public void testPartitionPositionsWithRleNotNull(PartitioningMode partitioningMo public void testPartitionPositionsWithRleNotNullWithReplication(PartitioningMode partitioningMode) { PagePartitioner pagePartitioner = pagePartitioner(BIGINT, BIGINT).replicate().build(); - Page page = new Page(createRLEBlock(0, POSITIONS_PER_PAGE), createLongSequenceBlock(0, POSITIONS_PER_PAGE)); + Page page = new Page(createRepeatedValuesBlock(0, POSITIONS_PER_PAGE), createLongSequenceBlock(0, POSITIONS_PER_PAGE)); processPages(pagePartitioner, partitioningMode, page); @@ -289,7 +289,7 @@ public void testPartitionPositionsWithRleNotNullWithReplication(PartitioningMode public void testPartitionPositionsWithRleNullWithNullChannel(PartitioningMode partitioningMode) { PagePartitioner pagePartitioner = pagePartitioner(BIGINT, BIGINT).withNullChannel(0).build(); - Page page = new Page(new RunLengthEncodedBlock(createLongsBlock((Long) null), POSITIONS_PER_PAGE), createLongSequenceBlock(0, POSITIONS_PER_PAGE)); + Page page = new Page(RunLengthEncodedBlock.create(createLongsBlock((Long) null), POSITIONS_PER_PAGE), createLongSequenceBlock(0, POSITIONS_PER_PAGE)); processPages(pagePartitioner, partitioningMode, page); @@ -317,7 +317,7 @@ public void testOutputForDictionaryBlock(PartitioningMode partitioningMode) public void testOutputForOneValueDictionaryBlock(PartitioningMode partitioningMode) { PagePartitioner pagePartitioner = pagePartitioner(BIGINT).build(); - Page page = new Page(new DictionaryBlock(createLongsBlock(0), new int[] {0, 0, 0, 0})); + Page page = new Page(DictionaryBlock.create(4, createLongsBlock(0), new int[] {0, 0, 0, 0})); processPages(pagePartitioner, partitioningMode, page); @@ -331,7 +331,7 @@ public void 
testOutputForOneValueDictionaryBlock(PartitioningMode partitioningMo public void testOutputForViewDictionaryBlock(PartitioningMode partitioningMode) { PagePartitioner pagePartitioner = pagePartitioner(BIGINT).build(); - Page page = new Page(new DictionaryBlock(createLongSequenceBlock(4, 8), new int[] {1, 0, 3, 2})); + Page page = new Page(DictionaryBlock.create(4, createLongSequenceBlock(4, 8), new int[] {1, 0, 3, 2})); processPages(pagePartitioner, partitioningMode, page); diff --git a/core/trino-main/src/test/java/io/trino/operator/output/TestPositionsAppender.java b/core/trino-main/src/test/java/io/trino/operator/output/TestPositionsAppender.java index 183212d586d2..c51de0735545 100644 --- a/core/trino-main/src/test/java/io/trino/operator/output/TestPositionsAppender.java +++ b/core/trino-main/src/test/java/io/trino/operator/output/TestPositionsAppender.java @@ -71,7 +71,6 @@ import static io.trino.block.BlockAssertions.createSmallintsBlock; import static io.trino.block.BlockAssertions.createStringsBlock; import static io.trino.block.BlockAssertions.createTinyintsBlock; -import static io.trino.spi.block.DictionaryId.randomDictionaryId; import static io.trino.spi.block.PageBuilderStatus.DEFAULT_MAX_PAGE_SIZE_IN_BYTES; import static io.trino.spi.type.BigintType.BIGINT; import static io.trino.spi.type.CharType.createCharType; @@ -219,17 +218,13 @@ public void testConsecutiveBuilds(TestType type) assertBlockEquals(type.getType(), positionsAppender.build(), block); // append not null rle - Block rleBlock = rleBlock(type, 1); - positionsAppender.append(allPositions(1), rleBlock); + Block rleBlock = rleBlock(type, 10); + positionsAppender.append(allPositions(10), rleBlock); assertBlockEquals(type.getType(), positionsAppender.build(), rleBlock); - // append empty rle - positionsAppender.append(positions(), rleBlock(type, 0)); - assertEquals(positionsAppender.build().getPositionCount(), 0); - // append null rle - Block nullRleBlock = nullRleBlock(type, 1); - 
positionsAppender.append(allPositions(1), nullRleBlock); + Block nullRleBlock = nullRleBlock(type, 10); + positionsAppender.append(allPositions(10), nullRleBlock); assertBlockEquals(type.getType(), positionsAppender.build(), nullRleBlock); // just build to confirm appender was reset @@ -245,11 +240,11 @@ public void testSliceRle() PositionsAppender positionsAppender = POSITIONS_APPENDER_FACTORY.create(VARCHAR, 10, DEFAULT_MAX_PAGE_SIZE_IN_BYTES); // first append some not empty value to avoid RleAwarePositionsAppender for the empty value - positionsAppender.appendRle(new RunLengthEncodedBlock(singleValueBlock("some value"), 1)); + positionsAppender.appendRle(singleValueBlock("some value"), 1); // append empty value multiple times to trigger jit compilation Block emptyStringBlock = singleValueBlock(""); for (int i = 0; i < 1000; i++) { - positionsAppender.appendRle(new RunLengthEncodedBlock(emptyStringBlock, 2000)); + positionsAppender.appendRle(emptyStringBlock, 2000); } } @@ -301,17 +296,17 @@ private static IntArrayList positions(int... 
positions) return new IntArrayList(positions); } - private DictionaryBlock dictionaryBlock(Block dictionary, int positionCount) + private Block dictionaryBlock(Block dictionary, int positionCount) { return createRandomDictionaryBlock(dictionary, positionCount); } - private DictionaryBlock dictionaryBlock(Block dictionary, int[] ids) + private Block dictionaryBlock(Block dictionary, int[] ids) { - return new DictionaryBlock(0, ids.length, dictionary, ids, false, randomDictionaryId()); + return DictionaryBlock.create(ids.length, dictionary, ids); } - private DictionaryBlock dictionaryBlock(TestType type, int positionCount, int dictionarySize, float nullRate) + private Block dictionaryBlock(TestType type, int positionCount, int dictionarySize, float nullRate) { Block dictionary = createRandomBlockForType(type, dictionarySize, nullRate); return createRandomDictionaryBlock(dictionary, positionCount); @@ -319,19 +314,22 @@ private DictionaryBlock dictionaryBlock(TestType type, int positionCount, int di private RunLengthEncodedBlock rleBlock(Block value, int positionCount) { - return new RunLengthEncodedBlock(value, positionCount); + checkArgument(positionCount >= 2); + return (RunLengthEncodedBlock) RunLengthEncodedBlock.create(value, positionCount); } private RunLengthEncodedBlock rleBlock(TestType type, int positionCount) { + checkArgument(positionCount >= 2); Block rleValue = createRandomBlockForType(type, 1, 0); - return new RunLengthEncodedBlock(rleValue, positionCount); + return (RunLengthEncodedBlock) RunLengthEncodedBlock.create(rleValue, positionCount); } private RunLengthEncodedBlock nullRleBlock(TestType type, int positionCount) { + checkArgument(positionCount >= 2); Block rleValue = nullBlock(type, 1); - return new RunLengthEncodedBlock(rleValue, positionCount); + return (RunLengthEncodedBlock) RunLengthEncodedBlock.create(rleValue, positionCount); } private Block partiallyNullBlock(TestType type, int positionCount) @@ -509,7 +507,7 @@ private static Block 
adapt(Block block) { if (block instanceof RunLengthEncodedBlock) { checkArgument(block.getPositionCount() == 0 || block.isNull(0)); - return new RunLengthEncodedBlock(new TestVariableWidthBlock(0, 1, EMPTY_SLICE, new int[] {0, 0}, new boolean[] {true}), block.getPositionCount()); + return RunLengthEncodedBlock.create(new TestVariableWidthBlock(0, 1, EMPTY_SLICE, new int[] {0, 0}, new boolean[] {true}), block.getPositionCount()); } int[] offsets = new int[block.getPositionCount() + 1]; diff --git a/core/trino-main/src/test/java/io/trino/operator/output/TestSlicePositionsAppender.java b/core/trino-main/src/test/java/io/trino/operator/output/TestSlicePositionsAppender.java index a102a75667c8..fde228c6793d 100644 --- a/core/trino-main/src/test/java/io/trino/operator/output/TestSlicePositionsAppender.java +++ b/core/trino-main/src/test/java/io/trino/operator/output/TestSlicePositionsAppender.java @@ -38,12 +38,12 @@ public void testAppendEmptySliceRle() { // test SlicePositionAppender.appendRle with empty value (Slice with length 0) PositionsAppender positionsAppender = new SlicePositionsAppender(1, 100); - RunLengthEncodedBlock rleBlock = new RunLengthEncodedBlock(createStringsBlock(""), 10); - positionsAppender.appendRle(rleBlock); + Block value = createStringsBlock(""); + positionsAppender.appendRle(value, 10); Block actualBlock = positionsAppender.build(); - assertBlockEquals(VARCHAR, actualBlock, rleBlock); + assertBlockEquals(VARCHAR, actualBlock, RunLengthEncodedBlock.create(value, 10)); } // test append with VariableWidthBlock using Slice not backed by byte array diff --git a/core/trino-main/src/test/java/io/trino/operator/project/BenchmarkDictionaryBlock.java b/core/trino-main/src/test/java/io/trino/operator/project/BenchmarkDictionaryBlock.java index 4b453116a634..af04f636eb0e 100644 --- a/core/trino-main/src/test/java/io/trino/operator/project/BenchmarkDictionaryBlock.java +++ 
b/core/trino-main/src/test/java/io/trino/operator/project/BenchmarkDictionaryBlock.java @@ -130,10 +130,10 @@ public void setup() default: throw new IllegalArgumentException("Unrecognized value type: " + valueType); } - dictionaryBlock = new DictionaryBlock(mapBlock, positionsIds); + dictionaryBlock = (DictionaryBlock) DictionaryBlock.create(positionsIds.length, mapBlock, positionsIds); int[] allPositions = IntStream.range(0, POSITIONS).toArray(); - allPositionsDictionaryBlock = new DictionaryBlock(mapBlock, allPositions); - allPositionsCompactDictionaryBlock = new DictionaryBlock(POSITIONS, mapBlock, allPositions, true); + allPositionsDictionaryBlock = (DictionaryBlock) DictionaryBlock.create(allPositions.length, mapBlock, allPositions); + allPositionsCompactDictionaryBlock = (DictionaryBlock) DictionaryBlock.create(POSITIONS, mapBlock, allPositions); } private static Block createVarcharMapBlock(int positionCount) @@ -156,7 +156,7 @@ private static Block createVarcharDictionaryBlock(List values) for (int i = 0; i < ids.length; i++) { ids[i] = i; } - return new DictionaryBlock(dictionary, ids); + return DictionaryBlock.create(ids.length, dictionary, ids); } private static Block createIntMapBlock(int positionCount) @@ -179,7 +179,7 @@ private static Block createIntDictionaryBlock(int positionCount) for (int i = 0; i < ids.length; i++) { ids[i] = i; } - return new DictionaryBlock(dictionary, ids); + return DictionaryBlock.create(ids.length, dictionary, ids); } private static Block createIntBlock(int positionCount) diff --git a/core/trino-main/src/test/java/io/trino/operator/project/TestDictionaryAwarePageFilter.java b/core/trino-main/src/test/java/io/trino/operator/project/TestDictionaryAwarePageFilter.java index 2c533559ad81..8b47e8b86444 100644 --- a/core/trino-main/src/test/java/io/trino/operator/project/TestDictionaryAwarePageFilter.java +++ b/core/trino-main/src/test/java/io/trino/operator/project/TestDictionaryAwarePageFilter.java @@ -62,9 +62,9 @@ public void 
testRleBlock() private static void testRleBlock(boolean filterRange) { DictionaryAwarePageFilter filter = createDictionaryAwarePageFilter(filterRange, LongArrayBlock.class); - RunLengthEncodedBlock match = new RunLengthEncodedBlock(createLongSequenceBlock(4, 5), 100); + RunLengthEncodedBlock match = (RunLengthEncodedBlock) RunLengthEncodedBlock.create(createLongSequenceBlock(4, 5), 100); testFilter(filter, match, filterRange); - RunLengthEncodedBlock noMatch = new RunLengthEncodedBlock(createLongSequenceBlock(0, 1), 100); + RunLengthEncodedBlock noMatch = (RunLengthEncodedBlock) RunLengthEncodedBlock.create(createLongSequenceBlock(0, 1), 100); testFilter(filter, noMatch, filterRange); } @@ -72,7 +72,7 @@ private static void testRleBlock(boolean filterRange) public void testRleBlockWithFailure() { DictionaryAwarePageFilter filter = createDictionaryAwarePageFilter(true, LongArrayBlock.class); - RunLengthEncodedBlock fail = new RunLengthEncodedBlock(createLongSequenceBlock(-10, -9), 100); + RunLengthEncodedBlock fail = (RunLengthEncodedBlock) RunLengthEncodedBlock.create(createLongSequenceBlock(-10, -9), 100); assertThatThrownBy(() -> testFilter(filter, fail, true)) .isInstanceOf(NegativeValueException.class) .hasMessage("value is negative: -10"); @@ -88,7 +88,7 @@ public void testDictionaryBlock() testFilter(createDictionaryBlock(20, 0), LongArrayBlock.class); // match all - testFilter(new DictionaryBlock(createLongSequenceBlock(4, 5), new int[100]), LongArrayBlock.class); + testFilter(DictionaryBlock.create(100, createLongSequenceBlock(4, 5), new int[100]), LongArrayBlock.class); } @Test @@ -109,7 +109,7 @@ public void testDictionaryBlockProcessingWithUnusedFailure() testFilter(createDictionaryBlockWithUnusedEntries(20, 0), DictionaryBlock.class); // match all - testFilter(new DictionaryBlock(createLongsBlock(4, 5, -1), new int[100]), DictionaryBlock.class); + testFilter(DictionaryBlock.create(100, createLongsBlock(4, 5, -1), new int[100]), DictionaryBlock.class); } 
@Test @@ -118,8 +118,8 @@ public void testDictionaryProcessingEnableDisable() TestDictionaryFilter nestedFilter = new TestDictionaryFilter(true); DictionaryAwarePageFilter filter = new DictionaryAwarePageFilter(nestedFilter); - DictionaryBlock ineffectiveBlock = createDictionaryBlock(100, 20); - DictionaryBlock effectiveBlock = createDictionaryBlock(10, 100); + Block ineffectiveBlock = createDictionaryBlock(100, 20); + Block effectiveBlock = createDictionaryBlock(10, 100); // function will always processes the first dictionary nestedFilter.setExpectedType(LongArrayBlock.class); @@ -138,28 +138,28 @@ public void testDictionaryProcessingEnableDisable() testFilter(filter, effectiveBlock, true); } - private static DictionaryBlock createDictionaryBlock(int dictionarySize, int blockSize) + private static Block createDictionaryBlock(int dictionarySize, int blockSize) { Block dictionary = createLongSequenceBlock(0, dictionarySize); int[] ids = new int[blockSize]; Arrays.setAll(ids, index -> index % dictionarySize); - return new DictionaryBlock(dictionary, ids); + return DictionaryBlock.create(ids.length, dictionary, ids); } - private static DictionaryBlock createDictionaryBlockWithFailure(int dictionarySize, int blockSize) + private static Block createDictionaryBlockWithFailure(int dictionarySize, int blockSize) { Block dictionary = createLongSequenceBlock(-10, dictionarySize - 10); int[] ids = new int[blockSize]; Arrays.setAll(ids, index -> index % dictionarySize); - return new DictionaryBlock(dictionary, ids); + return DictionaryBlock.create(ids.length, dictionary, ids); } - private static DictionaryBlock createDictionaryBlockWithUnusedEntries(int dictionarySize, int blockSize) + private static Block createDictionaryBlockWithUnusedEntries(int dictionarySize, int blockSize) { Block dictionary = createLongSequenceBlock(-10, dictionarySize); int[] ids = new int[blockSize]; Arrays.setAll(ids, index -> (index % dictionarySize) + 10); - return new DictionaryBlock(dictionary, 
ids); + return DictionaryBlock.create(ids.length, dictionary, ids); } private static void testFilter(Block block, Class expectedType) diff --git a/core/trino-main/src/test/java/io/trino/operator/project/TestDictionaryAwarePageProjection.java b/core/trino-main/src/test/java/io/trino/operator/project/TestDictionaryAwarePageProjection.java index 289bff97cc8f..47cc3c6110c5 100644 --- a/core/trino-main/src/test/java/io/trino/operator/project/TestDictionaryAwarePageProjection.java +++ b/core/trino-main/src/test/java/io/trino/operator/project/TestDictionaryAwarePageProjection.java @@ -90,7 +90,7 @@ public void testSimpleBlock(boolean forceYield, boolean produceLazyBlock) public void testRleBlock(boolean forceYield, boolean produceLazyBlock) { Block value = createLongSequenceBlock(42, 43); - RunLengthEncodedBlock block = new RunLengthEncodedBlock(value, 100); + RunLengthEncodedBlock block = (RunLengthEncodedBlock) RunLengthEncodedBlock.create(value, 100); testProject(block, RunLengthEncodedBlock.class, forceYield, produceLazyBlock); } @@ -99,7 +99,7 @@ public void testRleBlock(boolean forceYield, boolean produceLazyBlock) public void testRleBlockWithFailure(boolean forceYield, boolean produceLazyBlock) { Block value = createLongSequenceBlock(-43, -42); - RunLengthEncodedBlock block = new RunLengthEncodedBlock(value, 100); + RunLengthEncodedBlock block = (RunLengthEncodedBlock) RunLengthEncodedBlock.create(value, 100); testProjectFails(block, RunLengthEncodedBlock.class, forceYield, produceLazyBlock); } @@ -107,7 +107,7 @@ public void testRleBlockWithFailure(boolean forceYield, boolean produceLazyBlock @Test(dataProvider = "forceYield") public void testDictionaryBlock(boolean forceYield, boolean produceLazyBlock) { - DictionaryBlock block = createDictionaryBlock(10, 100); + Block block = createDictionaryBlock(10, 100); testProject(block, DictionaryBlock.class, forceYield, produceLazyBlock); } @@ -115,7 +115,7 @@ public void testDictionaryBlock(boolean forceYield, boolean 
produceLazyBlock) @Test(dataProvider = "forceYield") public void testDictionaryBlockWithFailure(boolean forceYield, boolean produceLazyBlock) { - DictionaryBlock block = createDictionaryBlockWithFailure(10, 100); + Block block = createDictionaryBlockWithFailure(10, 100); testProjectFails(block, DictionaryBlock.class, forceYield, produceLazyBlock); } @@ -123,7 +123,7 @@ public void testDictionaryBlockWithFailure(boolean forceYield, boolean produceLa @Test(dataProvider = "forceYield") public void testDictionaryBlockProcessingWithUnusedFailure(boolean forceYield, boolean produceLazyBlock) { - DictionaryBlock block = createDictionaryBlockWithUnusedEntries(10, 100); + Block block = createDictionaryBlockWithUnusedEntries(10, 100); // failures in the dictionary processing will cause a fallback to normal columnar processing testProject(block, LongArrayBlock.class, forceYield, produceLazyBlock); @@ -135,7 +135,7 @@ public void testDictionaryProcessingIgnoreYield() DictionaryAwarePageProjection projection = createProjection(false); // the same input block will bypass yield with multiple projections - DictionaryBlock block = createDictionaryBlock(10, 100); + Block block = createDictionaryBlock(10, 100); testProjectRange(block, DictionaryBlock.class, projection, true, false); testProjectFastReturnIgnoreYield(block, projection, false); testProjectFastReturnIgnoreYield(block, projection, false); @@ -148,7 +148,7 @@ public void testDictionaryProcessingEnableDisable(boolean forceYield, boolean pr DictionaryAwarePageProjection projection = createProjection(produceLazyBlock); // function will always processes the first dictionary - DictionaryBlock ineffectiveBlock = createDictionaryBlock(100, 20); + Block ineffectiveBlock = createDictionaryBlock(100, 20); testProjectRange(ineffectiveBlock, DictionaryBlock.class, projection, forceYield, produceLazyBlock); testProjectFastReturnIgnoreYield(ineffectiveBlock, projection, produceLazyBlock); // dictionary processing can reuse the last 
dictionary @@ -156,7 +156,7 @@ public void testDictionaryProcessingEnableDisable(boolean forceYield, boolean pr testProjectList(ineffectiveBlock, DictionaryBlock.class, projection, false, produceLazyBlock); // last dictionary not effective, so dictionary processing is disabled - DictionaryBlock effectiveBlock = createDictionaryBlock(10, 100); + Block effectiveBlock = createDictionaryBlock(10, 100); testProjectRange(effectiveBlock, LongArrayBlock.class, projection, forceYield, produceLazyBlock); testProjectList(effectiveBlock, LongArrayBlock.class, projection, forceYield, produceLazyBlock); @@ -180,17 +180,17 @@ public void testPreservesDictionaryInstance() block -> randomDictionaryId(), false); Block dictionary = createLongsBlock(0, 1); - DictionaryBlock firstDictionaryBlock = new DictionaryBlock(dictionary, new int[] {0, 1, 2, 3}); - DictionaryBlock secondDictionaryBlock = new DictionaryBlock(dictionary, new int[] {3, 2, 1, 0}); + Block firstDictionaryBlock = DictionaryBlock.create(4, dictionary, new int[] {0, 1, 2, 3}); + Block secondDictionaryBlock = DictionaryBlock.create(4, dictionary, new int[] {3, 2, 1, 0}); DriverYieldSignal yieldSignal = new DriverYieldSignal(); - Work firstWork = projection.project(null, yieldSignal, new Page(firstDictionaryBlock), SelectedPositions.positionsList(new int[] {0}, 0, 1)); + Work firstWork = projection.project(null, yieldSignal, new Page(firstDictionaryBlock), SelectedPositions.positionsList(new int[] {0, 1}, 0, 2)); assertTrue(firstWork.process()); Block firstOutputBlock = firstWork.getResult(); assertInstanceOf(firstOutputBlock, DictionaryBlock.class); - Work secondWork = projection.project(null, yieldSignal, new Page(secondDictionaryBlock), SelectedPositions.positionsList(new int[] {0}, 0, 1)); + Work secondWork = projection.project(null, yieldSignal, new Page(secondDictionaryBlock), SelectedPositions.positionsList(new int[] {0, 1}, 0, 2)); assertTrue(secondWork.process()); Block secondOutputBlock = secondWork.getResult(); 
@@ -203,28 +203,28 @@ public void testPreservesDictionaryInstance() assertSame(firstDictionary, dictionary); } - private static DictionaryBlock createDictionaryBlock(int dictionarySize, int blockSize) + private static Block createDictionaryBlock(int dictionarySize, int blockSize) { Block dictionary = createLongSequenceBlock(0, dictionarySize); int[] ids = new int[blockSize]; Arrays.setAll(ids, index -> index % dictionarySize); - return new DictionaryBlock(dictionary, ids); + return DictionaryBlock.create(ids.length, dictionary, ids); } - private static DictionaryBlock createDictionaryBlockWithFailure(int dictionarySize, int blockSize) + private static Block createDictionaryBlockWithFailure(int dictionarySize, int blockSize) { Block dictionary = createLongSequenceBlock(-10, dictionarySize - 10); int[] ids = new int[blockSize]; Arrays.setAll(ids, index -> index % dictionarySize); - return new DictionaryBlock(dictionary, ids); + return DictionaryBlock.create(ids.length, dictionary, ids); } - private static DictionaryBlock createDictionaryBlockWithUnusedEntries(int dictionarySize, int blockSize) + private static Block createDictionaryBlockWithUnusedEntries(int dictionarySize, int blockSize) { Block dictionary = createLongSequenceBlock(-10, dictionarySize); int[] ids = new int[blockSize]; Arrays.setAll(ids, index -> (index % dictionarySize) + 10); - return new DictionaryBlock(dictionary, ids); + return DictionaryBlock.create(ids.length, dictionary, ids); } private static Block projectWithYield(Work work, DriverYieldSignal yieldSignal) diff --git a/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkArraySubscript.java b/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkArraySubscript.java index 75dbf165e9ae..68e5326c6b51 100644 --- a/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkArraySubscript.java +++ b/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkArraySubscript.java @@ -198,7 +198,7 @@ private static Block 
createDictionaryValueBlock(int positionCount, int mapSize) for (int i = 0; i < keyIds.length; i++) { keyIds[i] = ThreadLocalRandom.current().nextInt(0, dictionarySize); } - return new DictionaryBlock(dictionaryBlock, keyIds); + return DictionaryBlock.create(keyIds.length, dictionaryBlock, keyIds); } private static String randomString(int length) diff --git a/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkMapConcat.java b/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkMapConcat.java index 538f49280d95..4367384b11fd 100644 --- a/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkMapConcat.java +++ b/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkMapConcat.java @@ -169,7 +169,7 @@ private static Block createKeyBlock(int positionCount, List keys) for (int i = 0; i < keyIds.length; i++) { keyIds[i] = i % keys.size(); } - return new DictionaryBlock(keyDictionaryBlock, keyIds); + return DictionaryBlock.create(keyIds.length, keyDictionaryBlock, keyIds); } private static Block createValueBlock(int positionCount, int mapSize) diff --git a/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkMapSubscript.java b/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkMapSubscript.java index 26cef1c1255b..13ec1d8c30df 100644 --- a/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkMapSubscript.java +++ b/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkMapSubscript.java @@ -180,7 +180,7 @@ private static Block createKeyBlock(int positionCount, List keys) for (int i = 0; i < keyIds.length; i++) { keyIds[i] = i % keys.size(); } - return new DictionaryBlock(keyDictionaryBlock, keyIds); + return DictionaryBlock.create(keyIds.length, keyDictionaryBlock, keyIds); } private static Block createFixWidthValueBlock(int positionCount, int mapSize) @@ -219,7 +219,7 @@ private static Block createDictionaryValueBlock(int positionCount, int mapSize) for (int i = 0; i < keyIds.length; 
i++) { keyIds[i] = ThreadLocalRandom.current().nextInt(0, dictionarySize); } - return new DictionaryBlock(dictionaryBlock, keyIds); + return DictionaryBlock.create(keyIds.length, dictionaryBlock, keyIds); } private static String randomString(int length) diff --git a/core/trino-main/src/test/java/io/trino/operator/scalar/TestPageProcessorCompiler.java b/core/trino-main/src/test/java/io/trino/operator/scalar/TestPageProcessorCompiler.java index 44f1b576a4eb..501723a6b73f 100644 --- a/core/trino-main/src/test/java/io/trino/operator/scalar/TestPageProcessorCompiler.java +++ b/core/trino-main/src/test/java/io/trino/operator/scalar/TestPageProcessorCompiler.java @@ -22,6 +22,7 @@ import io.trino.operator.DriverYieldSignal; import io.trino.operator.project.PageProcessor; import io.trino.spi.Page; +import io.trino.spi.block.Block; import io.trino.spi.block.DictionaryBlock; import io.trino.spi.block.RunLengthEncodedBlock; import io.trino.spi.type.ArrayType; @@ -36,7 +37,7 @@ import static com.google.common.collect.Iterators.getOnlyElement; import static io.trino.block.BlockAssertions.createLongDictionaryBlock; -import static io.trino.block.BlockAssertions.createRLEBlock; +import static io.trino.block.BlockAssertions.createRepeatedValuesBlock; import static io.trino.block.BlockAssertions.createSlicesBlock; import static io.trino.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext; import static io.trino.operator.project.PageProcessor.MAX_BATCH_SIZE; @@ -146,7 +147,7 @@ public void testSanityFilterOnRLE() PageProcessor processor = compiler.compilePageProcessor(Optional.of(filter), ImmutableList.of(field(0, BIGINT)), MAX_BATCH_SIZE).get(); - Page page = new Page(createRLEBlock(5L, 100)); + Page page = new Page(createRepeatedValuesBlock(5L, 100)); Page outputPage = getOnlyElement( processor.process( null, @@ -208,7 +209,7 @@ public void testNonDeterministicProject() assertFalse(outputPage.getBlock(0) instanceof DictionaryBlock); } - private static 
DictionaryBlock createDictionaryBlock(Slice[] expectedValues, int positionCount) + private static Block createDictionaryBlock(Slice[] expectedValues, int positionCount) { int dictionarySize = expectedValues.length; int[] ids = new int[positionCount]; @@ -216,7 +217,7 @@ private static DictionaryBlock createDictionaryBlock(Slice[] expectedValues, int for (int i = 0; i < positionCount; i++) { ids[i] = i % dictionarySize; } - return new DictionaryBlock(createSlicesBlock(expectedValues), ids); + return DictionaryBlock.create(ids.length, createSlicesBlock(expectedValues), ids); } private static Slice[] createExpectedValues(int positionCount) diff --git a/core/trino-spi/pom.xml b/core/trino-spi/pom.xml index cd7fb8fa0d4d..1a3b2c67dd1f 100644 --- a/core/trino-spi/pom.xml +++ b/core/trino-spi/pom.xml @@ -179,23 +179,62 @@ @java.lang.Deprecated(*) ^*; - + + + java.method.returnTypeChanged + method io.trino.spi.block.Block io.trino.spi.block.ArrayBlockBuilder::build() + + + java.method.visibilityReduced + method void io.trino.spi.block.DictionaryBlock::<init>(int, int, io.trino.spi.block.Block, int[], boolean, boolean, io.trino.spi.block.DictionaryId) + method void io.trino.spi.block.DictionaryBlock::<init>(int, int, io.trino.spi.block.Block, int[], boolean, boolean, io.trino.spi.block.DictionaryId) + public + private + + + true + java.method.visibilityReduced + method void io.trino.spi.block.DictionaryBlock::<init>(io.trino.spi.block.Block, int[]) + method void io.trino.spi.block.DictionaryBlock::<init>(int, int, io.trino.spi.block.Block, int[]) + public + package + + + java.method.removed + method void io.trino.spi.block.DictionaryBlock::<init>(int, io.trino.spi.block.Block, int[]) + java.method.removed - method java.util.Set<io.trino.spi.security.RoleGrant> io.trino.spi.connector.ConnectorMetadata::listAllRoleGrants(io.trino.spi.connector.ConnectorSession, java.util.Optional<java.util.Set<java.lang.String>>, java.util.Optional<java.util.Set<java.lang.String>>, 
java.util.OptionalLong) + method void io.trino.spi.block.DictionaryBlock::<init>(int, io.trino.spi.block.Block, int[], boolean) - java.method.addedToInterface - method io.trino.spi.block.BlockBuilder io.trino.spi.block.BlockBuilder::newBlockBuilderLike(int, io.trino.spi.block.BlockBuilderStatus) + java.method.removed + method void io.trino.spi.block.DictionaryBlock::<init>(int, io.trino.spi.block.Block, int[], boolean, io.trino.spi.block.DictionaryId) + + + java.method.removed + method void io.trino.spi.block.DictionaryBlock::<init>(int, int, io.trino.spi.block.Block, int[], boolean, io.trino.spi.block.DictionaryId) - java.field.removed - field io.trino.spi.expression.StandardFunctions.LIKE_PATTERN_FUNCTION_NAME + java.method.removed + method void io.trino.spi.block.DictionaryBlock::<init>(int, io.trino.spi.block.Block, int[], boolean, io.trino.spi.block.DictionaryId) + + + java.method.removed + method void io.trino.spi.block.DictionaryBlock::<init>(int, io.trino.spi.block.Block, int[], io.trino.spi.block.DictionaryId) java.method.returnTypeChanged - method io.trino.spi.block.Block io.trino.spi.block.ArrayBlockBuilder::build() + method io.trino.spi.block.RunLengthEncodedBlock io.trino.spi.block.RunLengthBlockEncoding::readBlock(io.trino.spi.block.BlockEncodingSerde, io.airlift.slice.SliceInput) + method io.trino.spi.block.Block io.trino.spi.block.RunLengthBlockEncoding::readBlock(io.trino.spi.block.BlockEncodingSerde, io.airlift.slice.SliceInput) + + + java.method.visibilityReduced + method void io.trino.spi.block.RunLengthEncodedBlock::<init>(io.trino.spi.block.Block, int) + method void io.trino.spi.block.RunLengthEncodedBlock::<init>(io.trino.spi.block.Block, int) + public + private diff --git a/core/trino-spi/src/main/java/io/trino/spi/Page.java b/core/trino-spi/src/main/java/io/trino/spi/Page.java index 9718251d3922..39d37c073633 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/Page.java +++ b/core/trino-spi/src/main/java/io/trino/spi/Page.java @@ -25,8 
+25,6 @@ import java.util.Map; import static io.airlift.slice.SizeOf.sizeOf; -import static io.trino.spi.block.DictionaryId.randomDictionaryId; -import static java.lang.Math.min; import static java.lang.String.format; import static java.util.Objects.requireNonNull; @@ -187,7 +185,7 @@ public void compact() Map dictionaryBlocks = getRelatedDictionaryBlocks(); for (DictionaryBlockIndexes blockIndexes : dictionaryBlocks.values()) { - List compactBlocks = compactRelatedBlocks(blockIndexes.getBlocks()); + List compactBlocks = DictionaryBlock.compactRelatedBlocks(blockIndexes.getBlocks()); List indexes = blockIndexes.getIndexes(); for (int i = 0; i < compactBlocks.size(); i++) { blocks[indexes.get(i)] = compactBlocks.get(i); @@ -212,68 +210,6 @@ private Map getRelatedDictionaryBlocks() return relatedDictionaryBlocks; } - private static List compactRelatedBlocks(List blocks) - { - DictionaryBlock firstDictionaryBlock = blocks.get(0); - Block dictionary = firstDictionaryBlock.getDictionary(); - - int positionCount = firstDictionaryBlock.getPositionCount(); - int dictionarySize = dictionary.getPositionCount(); - - // determine which dictionary entries are referenced and build a reindex for them - int[] dictionaryPositionsToCopy = new int[min(dictionarySize, positionCount)]; - int[] remapIndex = new int[dictionarySize]; - Arrays.fill(remapIndex, -1); - - int numberOfIndexes = 0; - for (int i = 0; i < positionCount; i++) { - int position = firstDictionaryBlock.getId(i); - if (remapIndex[position] == -1) { - dictionaryPositionsToCopy[numberOfIndexes] = position; - remapIndex[position] = numberOfIndexes; - numberOfIndexes++; - } - } - - // entire dictionary is referenced - if (numberOfIndexes == dictionarySize) { - return blocks; - } - - // compact the dictionaries - int[] newIds = getNewIds(positionCount, firstDictionaryBlock, remapIndex); - List outputDictionaryBlocks = new ArrayList<>(blocks.size()); - DictionaryId newDictionaryId = randomDictionaryId(); - for 
(DictionaryBlock dictionaryBlock : blocks) { - if (!firstDictionaryBlock.getDictionarySourceId().equals(dictionaryBlock.getDictionarySourceId())) { - throw new IllegalArgumentException("dictionarySourceIds must be the same"); - } - - try { - Block compactDictionary = dictionaryBlock.getDictionary().copyPositions(dictionaryPositionsToCopy, 0, numberOfIndexes); - outputDictionaryBlocks.add(new DictionaryBlock(positionCount, compactDictionary, newIds, !(compactDictionary instanceof DictionaryBlock), newDictionaryId)); - } - catch (UnsupportedOperationException e) { - // ignore if copy positions is not supported for the dictionary - outputDictionaryBlocks.add(dictionaryBlock); - } - } - return outputDictionaryBlocks; - } - - private static int[] getNewIds(int positionCount, DictionaryBlock dictionaryBlock, int[] remapIndex) - { - int[] newIds = new int[positionCount]; - for (int i = 0; i < positionCount; i++) { - int newId = remapIndex[dictionaryBlock.getId(i)]; - if (newId == -1) { - throw new IllegalStateException("reference to a non-existent key"); - } - newIds[i] = newId; - } - return newIds; - } - /** * Returns a page that assures all data is in memory. * May return the same page if all page data is already in memory. 
diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/ArrayBlockBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/ArrayBlockBuilder.java index fc67f31ac585..85d1820d4f02 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/ArrayBlockBuilder.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/ArrayBlockBuilder.java @@ -283,9 +283,9 @@ public Block copyRegion(int position, int length) return super.copyRegion(position, length); } - private RunLengthEncodedBlock nullRle(int positionCount) + private Block nullRle(int positionCount) { ArrayBlock nullValueBlock = createArrayBlockInternal(0, 1, new boolean[] {true}, new int[] {0, 0}, values.newBlockBuilderLike(null).build()); - return new RunLengthEncodedBlock(nullValueBlock, positionCount); + return RunLengthEncodedBlock.create(nullValueBlock, positionCount); } } diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/Block.java b/core/trino-spi/src/main/java/io/trino/spi/block/Block.java index 1cad3519512c..23c621bbd4c2 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/Block.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/Block.java @@ -21,7 +21,6 @@ import java.util.function.ObjLongConsumer; import static io.trino.spi.block.BlockUtil.checkArrayRange; -import static io.trino.spi.block.DictionaryId.randomDictionaryId; public interface Block { @@ -244,7 +243,7 @@ default Block getPositions(int[] positions, int offset, int length) { checkArrayRange(positions, offset, length); - return new DictionaryBlock(offset, length, this, positions, false, randomDictionaryId()); + return new DictionaryBlock(offset, length, this, positions); } /** diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/ByteArrayBlockBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/ByteArrayBlockBuilder.java index b0fb3fedbdee..268d2f81bba7 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/ByteArrayBlockBuilder.java +++ 
b/core/trino-spi/src/main/java/io/trino/spi/block/ByteArrayBlockBuilder.java @@ -102,7 +102,7 @@ public BlockBuilder appendNull() public Block build() { if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } return new ByteArrayBlock(0, positionCount, hasNullValue ? valueIsNull : null, values); } @@ -227,7 +227,7 @@ public Block copyPositions(int[] positions, int offset, int length) checkArrayRange(positions, offset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } boolean[] newValueIsNull = null; if (hasNullValue) { @@ -251,7 +251,7 @@ public Block getRegion(int positionOffset, int length) checkValidRegion(getPositionCount(), positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } return new ByteArrayBlock(positionOffset, length, hasNullValue ? 
valueIsNull : null, values); } @@ -262,7 +262,7 @@ public Block copyRegion(int positionOffset, int length) checkValidRegion(getPositionCount(), positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } boolean[] newValueIsNull = null; if (hasNullValue) { diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/ColumnarArray.java b/core/trino-spi/src/main/java/io/trino/spi/block/ColumnarArray.java index 6e61decf0689..c46e67f51718 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/ColumnarArray.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/ColumnarArray.java @@ -84,7 +84,7 @@ private static ColumnarArray toColumnarArray(DictionaryBlock dictionaryBlock) dictionaryBlock, 0, offsets, - new DictionaryBlock(dictionaryIds.length, columnarArray.getElementsBlock(), dictionaryIds)); + DictionaryBlock.create(dictionaryIds.length, columnarArray.getElementsBlock(), dictionaryIds)); } private static ColumnarArray toColumnarArray(RunLengthEncodedBlock rleBlock) @@ -112,7 +112,7 @@ private static ColumnarArray toColumnarArray(RunLengthEncodedBlock rleBlock) rleBlock, 0, offsets, - new DictionaryBlock(dictionaryIds.length, columnarArray.getElementsBlock(), dictionaryIds)); + DictionaryBlock.create(dictionaryIds.length, columnarArray.getElementsBlock(), dictionaryIds)); } private ColumnarArray(Block nullCheckBlock, int offsetsOffset, int[] offsets, Block elementsBlock) diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/ColumnarMap.java b/core/trino-spi/src/main/java/io/trino/spi/block/ColumnarMap.java index ee8bbfc584cc..42b6419827c6 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/ColumnarMap.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/ColumnarMap.java @@ -84,8 +84,8 @@ private static ColumnarMap toColumnarMap(DictionaryBlock dictionaryBlock) dictionaryBlock, 0, offsets, - new 
DictionaryBlock(dictionaryIds.length, columnarMap.getKeysBlock(), dictionaryIds), - new DictionaryBlock(dictionaryIds.length, columnarMap.getValuesBlock(), dictionaryIds)); + DictionaryBlock.create(dictionaryIds.length, columnarMap.getKeysBlock(), dictionaryIds), + DictionaryBlock.create(dictionaryIds.length, columnarMap.getValuesBlock(), dictionaryIds)); } private static ColumnarMap toColumnarMap(RunLengthEncodedBlock rleBlock) @@ -113,8 +113,8 @@ private static ColumnarMap toColumnarMap(RunLengthEncodedBlock rleBlock) rleBlock, 0, offsets, - new DictionaryBlock(dictionaryIds.length, columnarMap.getKeysBlock(), dictionaryIds), - new DictionaryBlock(dictionaryIds.length, columnarMap.getValuesBlock(), dictionaryIds)); + DictionaryBlock.create(dictionaryIds.length, columnarMap.getKeysBlock(), dictionaryIds), + DictionaryBlock.create(dictionaryIds.length, columnarMap.getValuesBlock(), dictionaryIds)); } private ColumnarMap(Block nullCheckBlock, int offsetsOffset, int[] offsets, Block keysBlock, Block valuesBlock) diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/ColumnarRow.java b/core/trino-spi/src/main/java/io/trino/spi/block/ColumnarRow.java index c5383660f939..4e8f1632cb9b 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/ColumnarRow.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/ColumnarRow.java @@ -85,7 +85,7 @@ private static ColumnarRow toColumnarRow(DictionaryBlock dictionaryBlock) ColumnarRow columnarRow = toColumnarRow(dictionaryBlock.getDictionary()); Block[] fields = new Block[columnarRow.getFieldCount()]; for (int i = 0; i < columnarRow.getFieldCount(); i++) { - fields[i] = new DictionaryBlock(nonNullPositionCount, columnarRow.getField(i), dictionaryIds); + fields[i] = DictionaryBlock.create(nonNullPositionCount, columnarRow.getField(i), dictionaryIds); } int positionCount = dictionaryBlock.getPositionCount(); @@ -106,9 +106,7 @@ private static ColumnarRow toColumnarRowFromDictionaryWithoutNulls(DictionaryBlo 
dictionaryBlock.getRawIdsOffset(), dictionaryBlock.getPositionCount(), columnarRow.getField(i), - dictionaryBlock.getRawIds(), - false, - DictionaryId.randomDictionaryId()); + dictionaryBlock.getRawIds()); } return new ColumnarRow(dictionaryBlock.getPositionCount(), null, fields); } @@ -129,7 +127,7 @@ private static ColumnarRow toColumnarRow(RunLengthEncodedBlock rleBlock) fields[i] = nullSuppressedField; } else { - fields[i] = new RunLengthEncodedBlock(nullSuppressedField, rleBlock.getPositionCount()); + fields[i] = RunLengthEncodedBlock.create(nullSuppressedField, rleBlock.getPositionCount()); } } return new ColumnarRow(rleBlock.getPositionCount(), rleBlock, fields); diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/DictionaryBlock.java b/core/trino-spi/src/main/java/io/trino/spi/block/DictionaryBlock.java index 18030efdc95a..562efa6fbdb9 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/DictionaryBlock.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/DictionaryBlock.java @@ -17,6 +17,7 @@ import io.airlift.slice.Slices; import org.openjdk.jol.info.ClassLayout; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.OptionalInt; @@ -51,37 +52,51 @@ public class DictionaryBlock private final DictionaryId dictionarySourceId; private final boolean mayHaveNull; - public DictionaryBlock(Block dictionary, int[] ids) + public static Block create(int positionCount, Block dictionary, int[] ids) { - this(ids.length, dictionary, ids); + return createInternal(positionCount, dictionary, ids, randomDictionaryId()); } - public DictionaryBlock(int positionCount, Block dictionary, int[] ids) + /** + * This should only be used when creating a projection of another dictionary block.
+ */ + public static Block createProjectedDictionaryBlock(int positionCount, Block dictionary, int[] ids, DictionaryId dictionarySourceId) { - this(0, positionCount, dictionary, ids, false, randomDictionaryId()); + return createInternal(positionCount, dictionary, ids, dictionarySourceId); } - public DictionaryBlock(int positionCount, Block dictionary, int[] ids, DictionaryId dictionaryId) + private static Block createInternal(int positionCount, Block dictionary, int[] ids, DictionaryId dictionarySourceId) { - this(0, positionCount, dictionary, ids, false, dictionaryId); - } + if (positionCount == 0) { + return dictionary.copyRegion(0, 0); + } + if (positionCount == 1) { + return dictionary.getRegion(ids[0], 1); + } - public DictionaryBlock(int positionCount, Block dictionary, int[] ids, boolean dictionaryIsCompacted) - { - this(0, positionCount, dictionary, ids, dictionaryIsCompacted, randomDictionaryId()); - } + // if dictionary is an RLE then this can just be a new RLE + if (dictionary instanceof RunLengthEncodedBlock rle) { + return RunLengthEncodedBlock.create(rle.getValue(), positionCount); + } - public DictionaryBlock(int positionCount, Block dictionary, int[] ids, boolean dictionaryIsCompacted, DictionaryId dictionarySourceId) - { - this(0, positionCount, dictionary, ids, dictionaryIsCompacted, dictionarySourceId); + // unwrap dictionary in dictionary + if (dictionary instanceof DictionaryBlock dictionaryBlock) { + int[] newIds = new int[positionCount]; + for (int position = 0; position < positionCount; position++) { + newIds[position] = dictionaryBlock.getId(ids[position]); + } + dictionary = dictionaryBlock.getDictionary(); + ids = newIds; + } + return new DictionaryBlock(0, positionCount, dictionary, ids, false, false, dictionarySourceId); } - public DictionaryBlock(int idsOffset, int positionCount, Block dictionary, int[] ids, boolean dictionaryIsCompacted, DictionaryId dictionarySourceId) + DictionaryBlock(int idsOffset, int positionCount, Block 
dictionary, int[] ids) { - this(idsOffset, positionCount, dictionary, ids, dictionaryIsCompacted, false, dictionarySourceId); + this(idsOffset, positionCount, dictionary, ids, false, false, randomDictionaryId()); } - public DictionaryBlock(int idsOffset, int positionCount, Block dictionary, int[] ids, boolean dictionaryIsCompacted, boolean isSequentialIds, DictionaryId dictionarySourceId) + private DictionaryBlock(int idsOffset, int positionCount, Block dictionary, int[] ids, boolean dictionaryIsCompacted, boolean isSequentialIds, DictionaryId dictionarySourceId) { requireNonNull(dictionary, "dictionary is null"); requireNonNull(ids, "ids is null"); @@ -104,9 +119,6 @@ public DictionaryBlock(int idsOffset, int positionCount, Block dictionary, int[] this.mayHaveNull = positionCount > 0 && (!dictionary.isLoaded() || dictionary.mayHaveNull()); if (dictionaryIsCompacted) { - if (dictionary instanceof DictionaryBlock) { - throw new IllegalArgumentException("compacted dictionary should not have dictionary base block"); - } this.sizeInBytes = dictionary.getSizeInBytes() + (Integer.BYTES * (long) positionCount); this.uniqueIds = dictionary.getPositionCount(); } @@ -225,7 +237,7 @@ public OptionalInt fixedSizeInBytesPerPosition() // is our fixed size per position OptionalInt dictionarySizePerPosition = dictionary.fixedSizeInBytesPerPosition(); // Nested dictionaries should not include the additional id array overhead in the result - if (dictionarySizePerPosition.isPresent() && !(dictionary instanceof DictionaryBlock)) { + if (dictionarySizePerPosition.isPresent()) { dictionarySizePerPosition = OptionalInt.of(dictionarySizePerPosition.getAsInt() + Integer.BYTES); } return dictionarySizePerPosition; @@ -247,7 +259,7 @@ private void calculateCompactSize() int uniqueIds = 0; boolean[] used = new boolean[dictionary.getPositionCount()]; // nested dictionaries are assumed not to have sequential ids - boolean isSequentialIds = !(dictionary instanceof DictionaryBlock); + boolean 
isSequentialIds = true; int previousPosition = -1; for (int i = 0; i < positionCount; i++) { int position = ids[idsOffset + i]; @@ -355,10 +367,6 @@ public long getPositionsSizeInBytes(boolean[] positions, int selectedPositionsCo private long getSizeInBytesForSelectedPositions(boolean[] usedIds, int uniqueIds, int selectedPositions) { long dictionarySize = dictionary.getPositionsSizeInBytes(usedIds, uniqueIds); - if (dictionary instanceof DictionaryBlock) { - // Don't include the nested ids array overhead in the resulting size - dictionarySize -= (Integer.BYTES * (long) uniqueIds); - } if (uniqueIds == dictionary.getPositionCount() && this.sizeInBytes == -1) { // All positions in the dictionary are referenced, store the uniqueId count and sizeInBytes this.uniqueIds = uniqueIds; @@ -398,7 +406,7 @@ public Block copyPositions(int[] positions, int offset, int length) { checkArrayRange(positions, offset, length); - if (length <= 1 || dictionary instanceof DictionaryBlock || uniqueIds == positionCount) { + if (length <= 1 || uniqueIds == positionCount) { // each block position is unique or the dictionary is a nested dictionary block, // therefore it makes sense to unwrap this outer dictionary layer directly int[] positionsToCopy = new int[length]; @@ -446,7 +454,7 @@ public Block getRegion(int positionOffset, int length) return this; } - return new DictionaryBlock(idsOffset + positionOffset, length, dictionary, ids, false, dictionarySourceId); + return new DictionaryBlock(idsOffset + positionOffset, length, dictionary, ids, false, false, dictionarySourceId); } @Override @@ -459,13 +467,20 @@ public Block copyRegion(int position, int length) // copy the contiguous range directly via copyRegion return dictionary.copyRegion(getId(position), length); } - if (dictionary instanceof DictionaryBlock || uniqueIds == positionCount) { + if (uniqueIds == positionCount) { // each block position is unique or the dictionary is a nested dictionary block, // therefore it makes sense to 
unwrap this outer dictionary layer directly return dictionary.copyPositions(ids, idsOffset + position, length); } int[] newIds = Arrays.copyOfRange(ids, idsOffset + position, idsOffset + position + length); - DictionaryBlock dictionaryBlock = new DictionaryBlock(dictionary, newIds); + DictionaryBlock dictionaryBlock = new DictionaryBlock( + 0, + newIds.length, + dictionary, + newIds, + false, + false, + randomDictionaryId()); return dictionaryBlock.compact(); } @@ -504,7 +519,7 @@ public Block getPositions(int[] positions, int offset, int length) } // All positions must have been referenced in order to be compact isCompact &= (usedIds != null && usedIds.length == uniqueIds); - DictionaryBlock result = new DictionaryBlock(newIds.length, dictionary, newIds, isCompact, getDictionarySourceId()); + DictionaryBlock result = new DictionaryBlock(0, newIds.length, dictionary, newIds, isCompact, false, getDictionarySourceId()); if (usedIds != null && !isCompact) { // resulting dictionary is not compact, but we know the number of unique ids and which positions are used result.uniqueIds = uniqueIds; @@ -540,7 +555,7 @@ public Block copyWithAppendedNull() newIds[idsOffset + positionCount] = nullIndex; } - return new DictionaryBlock(idsOffset, positionCount + 1, newDictionary, newIds, isCompact(), getDictionarySourceId()); + return new DictionaryBlock(idsOffset, positionCount + 1, newDictionary, newIds, isCompact(), false, getDictionarySourceId()); } @Override @@ -566,7 +581,7 @@ public Block getLoadedBlock() if (loadedDictionary == dictionary) { return this; } - return new DictionaryBlock(idsOffset, getPositionCount(), loadedDictionary, ids, false, randomDictionaryId()); + return new DictionaryBlock(idsOffset, getPositionCount(), loadedDictionary, ids, false, false, randomDictionaryId()); } @Override @@ -621,10 +636,6 @@ public DictionaryId getDictionarySourceId() public boolean isCompact() { - if (dictionary instanceof DictionaryBlock) { - return false; - } - if (uniqueIds == 
-1) { calculateCompactSize(); } @@ -637,11 +648,6 @@ public DictionaryBlock compact() return this; } - DictionaryBlock unnested = unnest(); - if (unnested != this) { - return unnested.compact(); - } - // determine which dictionary entries are referenced and build a reindex for them int dictionarySize = dictionary.getPositionCount(); IntArrayList dictionaryPositionsToCopy = new IntArrayList(min(dictionarySize, positionCount)); @@ -692,26 +698,76 @@ public DictionaryBlock compact() } } - private DictionaryBlock unnest() + /** + * Compact the dictionary down to only the used positions for a set of + * blocks that have been projected from the same dictionary. + */ + public static List compactRelatedBlocks(List blocks) { - if (!(dictionary instanceof DictionaryBlock)) { - return this; - } + DictionaryBlock firstDictionaryBlock = blocks.get(0); + Block dictionary = firstDictionaryBlock.getDictionary(); + + int positionCount = firstDictionaryBlock.getPositionCount(); + int dictionarySize = dictionary.getPositionCount(); + + // determine which dictionary entries are referenced and build a reindex for them + int[] dictionaryPositionsToCopy = new int[min(dictionarySize, positionCount)]; + int[] remapIndex = new int[dictionarySize]; + Arrays.fill(remapIndex, -1); - int[] ids = new int[positionCount]; + int numberOfIndexes = 0; for (int i = 0; i < positionCount; i++) { - ids[i] = getId(i); + int position = firstDictionaryBlock.getId(i); + if (remapIndex[position] == -1) { + dictionaryPositionsToCopy[numberOfIndexes] = position; + remapIndex[position] = numberOfIndexes; + numberOfIndexes++; + } + } + + // entire dictionary is referenced + if (numberOfIndexes == dictionarySize) { + return blocks; } - Block dictionary = this.dictionary; - while (dictionary instanceof DictionaryBlock) { - DictionaryBlock nestedDictionary = (DictionaryBlock) dictionary; - for (int i = 0; i < positionCount; i++) { - ids[i] = nestedDictionary.getId(ids[i]); + // compact the dictionaries + int[] 
newIds = getNewIds(positionCount, firstDictionaryBlock, remapIndex); + List outputDictionaryBlocks = new ArrayList<>(blocks.size()); + DictionaryId newDictionaryId = randomDictionaryId(); + for (DictionaryBlock dictionaryBlock : blocks) { + if (!firstDictionaryBlock.getDictionarySourceId().equals(dictionaryBlock.getDictionarySourceId())) { + throw new IllegalArgumentException("dictionarySourceIds must be the same"); + } + + try { + Block compactDictionary = dictionaryBlock.getDictionary().copyPositions(dictionaryPositionsToCopy, 0, numberOfIndexes); + outputDictionaryBlocks.add(new DictionaryBlock( + 0, + positionCount, + compactDictionary, + newIds, + !(compactDictionary instanceof DictionaryBlock), + false, + newDictionaryId)); + } + catch (UnsupportedOperationException e) { + // ignore if copy positions is not supported for the dictionary + outputDictionaryBlocks.add(dictionaryBlock); } - dictionary = nestedDictionary.getDictionary(); } + return outputDictionaryBlocks; + } - return new DictionaryBlock(dictionary, ids); + private static int[] getNewIds(int positionCount, DictionaryBlock dictionaryBlock, int[] remapIndex) + { + int[] newIds = new int[positionCount]; + for (int i = 0; i < positionCount; i++) { + int newId = remapIndex[dictionaryBlock.getId(i)]; + if (newId == -1) { + throw new IllegalStateException("reference to a non-existent key"); + } + newIds[i] = newId; + } + return newIds; } } diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/DictionaryBlockEncoding.java b/core/trino-spi/src/main/java/io/trino/spi/block/DictionaryBlockEncoding.java index 1c86d98cba2d..9c806ff92daf 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/DictionaryBlockEncoding.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/DictionaryBlockEncoding.java @@ -19,6 +19,8 @@ import java.util.Optional; +import static io.trino.spi.block.DictionaryBlock.createProjectedDictionaryBlock; + public class DictionaryBlockEncoding implements BlockEncoding { @@ -78,7 
+80,7 @@ public Block readBlock(BlockEncodingSerde blockEncodingSerde, SliceInput sliceIn // We always compact the dictionary before we send it. However, dictionaryBlock comes from sliceInput, which may over-retain memory. // As a result, setting dictionaryIsCompacted to true is not appropriate here. // TODO: fix DictionaryBlock so that dictionaryIsCompacted can be set to true when the underlying block over-retains memory. - return new DictionaryBlock(positionCount, dictionaryBlock, ids, false, new DictionaryId(mostSignificantBits, leastSignificantBits, sequenceId)); + return createProjectedDictionaryBlock(positionCount, dictionaryBlock, ids, new DictionaryId(mostSignificantBits, leastSignificantBits, sequenceId)); } @Override diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/Int128ArrayBlockBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/Int128ArrayBlockBuilder.java index 0a2f3d5ac27b..3b796c256de9 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/Int128ArrayBlockBuilder.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/Int128ArrayBlockBuilder.java @@ -116,7 +116,7 @@ public BlockBuilder appendNull() public Block build() { if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } return new Int128ArrayBlock(0, positionCount, hasNullValue ? 
valueIsNull : null, values); } @@ -246,7 +246,7 @@ public Block copyPositions(int[] positions, int offset, int length) checkArrayRange(positions, offset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } boolean[] newValueIsNull = null; if (hasNullValue) { @@ -271,7 +271,7 @@ public Block getRegion(int positionOffset, int length) checkValidRegion(getPositionCount(), positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } return new Int128ArrayBlock(positionOffset, length, hasNullValue ? valueIsNull : null, values); } @@ -282,7 +282,7 @@ public Block copyRegion(int positionOffset, int length) checkValidRegion(getPositionCount(), positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } boolean[] newValueIsNull = null; if (hasNullValue) { diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/Int96ArrayBlockBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/Int96ArrayBlockBuilder.java index fc5096cb80ce..ce9a4aa91af6 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/Int96ArrayBlockBuilder.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/Int96ArrayBlockBuilder.java @@ -138,7 +138,7 @@ public BlockBuilder appendNull() public Block build() { if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } return new Int96ArrayBlock(0, positionCount, hasNullValue ? 
valueIsNull : null, high, low); } @@ -278,7 +278,7 @@ public Block copyPositions(int[] positions, int offset, int length) checkArrayRange(positions, offset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } boolean[] newValueIsNull = null; if (hasNullValue) { @@ -304,7 +304,7 @@ public Block getRegion(int positionOffset, int length) checkValidRegion(getPositionCount(), positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } return new Int96ArrayBlock(positionOffset, length, hasNullValue ? valueIsNull : null, high, low); } @@ -315,7 +315,7 @@ public Block copyRegion(int positionOffset, int length) checkValidRegion(getPositionCount(), positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } boolean[] newValueIsNull = null; if (hasNullValue) { diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/IntArrayBlockBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/IntArrayBlockBuilder.java index 7418bde44118..2265f812e4bc 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/IntArrayBlockBuilder.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/IntArrayBlockBuilder.java @@ -102,7 +102,7 @@ public BlockBuilder appendNull() public Block build() { if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } return new IntArrayBlock(0, positionCount, hasNullValue ? 
valueIsNull : null, values); } @@ -227,7 +227,7 @@ public Block copyPositions(int[] positions, int offset, int length) checkArrayRange(positions, offset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } boolean[] newValueIsNull = null; if (hasNullValue) { @@ -251,7 +251,7 @@ public Block getRegion(int positionOffset, int length) checkValidRegion(getPositionCount(), positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } return new IntArrayBlock(positionOffset, length, hasNullValue ? valueIsNull : null, values); } @@ -262,7 +262,7 @@ public Block copyRegion(int positionOffset, int length) checkValidRegion(getPositionCount(), positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } boolean[] newValueIsNull = null; if (hasNullValue) { diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/LongArrayBlockBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/LongArrayBlockBuilder.java index 656b44d57c5a..0175cb066d10 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/LongArrayBlockBuilder.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/LongArrayBlockBuilder.java @@ -103,7 +103,7 @@ public BlockBuilder appendNull() public Block build() { if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } return new LongArrayBlock(0, positionCount, hasNullValue ? 
valueIsNull : null, values); } @@ -274,7 +274,7 @@ public Block copyPositions(int[] positions, int offset, int length) checkArrayRange(positions, offset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } boolean[] newValueIsNull = null; if (hasNullValue) { @@ -298,7 +298,7 @@ public Block getRegion(int positionOffset, int length) checkValidRegion(getPositionCount(), positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } return new LongArrayBlock(positionOffset, length, hasNullValue ? valueIsNull : null, values); } @@ -309,7 +309,7 @@ public Block copyRegion(int positionOffset, int length) checkValidRegion(getPositionCount(), positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } boolean[] newValueIsNull = null; if (hasNullValue) { diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/RowBlockBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/RowBlockBuilder.java index f21e0594e3cf..4d607ccf1727 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/RowBlockBuilder.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/RowBlockBuilder.java @@ -284,7 +284,7 @@ public Block copyRegion(int position, int length) return super.copyRegion(position, length); } - private RunLengthEncodedBlock nullRle(int length) + private Block nullRle(int length) { Block[] fieldBlocks = new Block[numFields]; for (int i = 0; i < numFields; i++) { @@ -292,6 +292,6 @@ private RunLengthEncodedBlock nullRle(int length) } RowBlock nullRowBlock = createRowBlockInternal(0, 1, new boolean[] {true}, new int[] {0, 0}, fieldBlocks); - return new RunLengthEncodedBlock(nullRowBlock, length); + return 
RunLengthEncodedBlock.create(nullRowBlock, length); } } diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/RunLengthBlockEncoding.java b/core/trino-spi/src/main/java/io/trino/spi/block/RunLengthBlockEncoding.java index f78c5c76a6a8..b9cfc8acaf8f 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/RunLengthBlockEncoding.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/RunLengthBlockEncoding.java @@ -40,7 +40,7 @@ public void writeBlock(BlockEncodingSerde blockEncodingSerde, SliceOutput sliceO } @Override - public RunLengthEncodedBlock readBlock(BlockEncodingSerde blockEncodingSerde, SliceInput sliceInput) + public Block readBlock(BlockEncodingSerde blockEncodingSerde, SliceInput sliceInput) { // read the run length int positionCount = sliceInput.readInt(); @@ -48,6 +48,6 @@ public RunLengthEncodedBlock readBlock(BlockEncodingSerde blockEncodingSerde, Sl // read the value Block value = blockEncodingSerde.readBlock(sliceInput); - return new RunLengthEncodedBlock(value, positionCount); + return RunLengthEncodedBlock.create(value, positionCount); } } diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/RunLengthEncodedBlock.java b/core/trino-spi/src/main/java/io/trino/spi/block/RunLengthEncodedBlock.java index faab772d3d19..a7ee9a9a5310 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/RunLengthEncodedBlock.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/RunLengthEncodedBlock.java @@ -43,29 +43,55 @@ public static Block create(Type type, Object value, int positionCount) if (block instanceof RunLengthEncodedBlock) { block = ((RunLengthEncodedBlock) block).getValue(); } - return new RunLengthEncodedBlock(block, positionCount); + return create(block, positionCount); } - private final Block value; - private final int positionCount; - - public RunLengthEncodedBlock(Block value, int positionCount) + public static Block create(Block value, int positionCount) { requireNonNull(value, "value is null"); if 
(value.getPositionCount() != 1) { throw new IllegalArgumentException(format("Expected value to contain a single position but has %s positions", value.getPositionCount())); } - if (value instanceof RunLengthEncodedBlock) { - this.value = ((RunLengthEncodedBlock) value).getValue(); + if (positionCount == 0) { + return value.copyRegion(0, 0); } - else { - this.value = value; + if (positionCount == 1) { + return value; } + return new RunLengthEncodedBlock(value, positionCount); + } + + private final Block value; + private final int positionCount; + private RunLengthEncodedBlock(Block value, int positionCount) + { + requireNonNull(value, "value is null"); if (positionCount < 0) { throw new IllegalArgumentException("positionCount is negative"); } + if (positionCount < 2) { + throw new IllegalArgumentException("positionCount must be at least 2"); + } + + // do not nest an RLE or Dictionary in an RLE + if (value instanceof RunLengthEncodedBlock block) { + this.value = block.getValue(); + } + else if (value instanceof DictionaryBlock block) { + Block dictionary = block.getDictionary(); + int id = block.getId(0); + if (dictionary.getPositionCount() == 1 && id == 0) { + this.value = dictionary; + } + else { + this.value = dictionary.getRegion(id, 1); + } + } + else { + this.value = value; + } this.positionCount = positionCount; } @@ -81,6 +107,9 @@ public Block getValue() return value; } + /** + * Positions count will always be at least 2 + */ @Override public int getPositionCount() { @@ -137,7 +166,7 @@ public Block getPositions(int[] positions, int offset, int length) for (int i = offset; i < offset + length; i++) { checkValidPosition(positions[i], positionCount); } - return new RunLengthEncodedBlock(value, length); + return create(value, length); } @Override @@ -147,14 +176,14 @@ public Block copyPositions(int[] positions, int offset, int length) for (int i = offset; i < offset + length; i++) { checkValidPosition(positions[i], positionCount); } - return new 
RunLengthEncodedBlock(value.copyRegion(0, 1), length); + return create(value.copyRegion(0, 1), length); } @Override public Block getRegion(int positionOffset, int length) { checkValidRegion(positionCount, positionOffset, length); - return new RunLengthEncodedBlock(value, length); + return create(value, length); } @Override @@ -173,7 +202,7 @@ public long getPositionsSizeInBytes(@Nullable boolean[] positions, int selectedP public Block copyRegion(int positionOffset, int length) { checkValidRegion(positionCount, positionOffset, length); - return new RunLengthEncodedBlock(value.copyRegion(0, 1), length); + return create(value.copyRegion(0, 1), length); } @Override @@ -291,13 +320,13 @@ public boolean isNull(int position) public Block copyWithAppendedNull() { if (value.isNull(0)) { - return new RunLengthEncodedBlock(value, positionCount + 1); + return create(value, positionCount + 1); } Block dictionary = value.copyWithAppendedNull(); int[] ids = new int[positionCount + 1]; ids[positionCount] = 1; - return new DictionaryBlock(dictionary, ids); + return DictionaryBlock.create(ids.length, dictionary, ids); } @Override @@ -324,6 +353,6 @@ public Block getLoadedBlock() if (loadedValueBlock == value) { return this; } - return new RunLengthEncodedBlock(loadedValueBlock, positionCount); + return create(loadedValueBlock, positionCount); } } diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/ShortArrayBlockBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/ShortArrayBlockBuilder.java index 1cff8648d486..205c8004cdc8 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/ShortArrayBlockBuilder.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/ShortArrayBlockBuilder.java @@ -102,7 +102,7 @@ public BlockBuilder appendNull() public Block build() { if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positionCount); } return new ShortArrayBlock(0, 
positionCount, hasNullValue ? valueIsNull : null, values); } @@ -227,7 +227,7 @@ public Block copyPositions(int[] positions, int offset, int length) checkArrayRange(positions, offset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } boolean[] newValueIsNull = null; if (hasNullValue) { @@ -251,7 +251,7 @@ public Block getRegion(int positionOffset, int length) checkValidRegion(getPositionCount(), positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } return new ShortArrayBlock(positionOffset, length, hasNullValue ? valueIsNull : null, values); } @@ -262,7 +262,7 @@ public Block copyRegion(int positionOffset, int length) checkValidRegion(getPositionCount(), positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } boolean[] newValueIsNull = null; if (hasNullValue) { diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/VariableWidthBlockBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/VariableWidthBlockBuilder.java index cc37c9dfdd66..8e769c0ecb4e 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/VariableWidthBlockBuilder.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/VariableWidthBlockBuilder.java @@ -164,7 +164,7 @@ public Block copyPositions(int[] positions, int offset, int length) checkArrayRange(positions, offset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } int finalLength = 0; @@ -329,7 +329,7 @@ public Block getRegion(int positionOffset, int length) checkValidRegion(positionCount, positionOffset, length); if (!hasNonNullValue) { - return new 
RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } return new VariableWidthBlock(positionOffset, length, sliceOutput.slice(), offsets, hasNullValue ? valueIsNull : null); @@ -341,7 +341,7 @@ public Block copyRegion(int positionOffset, int length) int positionCount = getPositionCount(); checkValidRegion(positionCount, positionOffset, length); if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, length); } int[] newOffsets = compactOffsets(offsets, positionOffset, length); @@ -361,7 +361,7 @@ public Block build() throw new IllegalStateException("Current entry must be closed before the block can be built"); } if (!hasNonNullValue) { - return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positions); + return RunLengthEncodedBlock.create(NULL_VALUE_BLOCK, positions); } return new VariableWidthBlock(0, positions, sliceOutput.slice(), offsets, hasNullValue ? 
valueIsNull : null); } diff --git a/core/trino-spi/src/main/java/io/trino/spi/predicate/SortedRangeSet.java b/core/trino-spi/src/main/java/io/trino/spi/predicate/SortedRangeSet.java index f777d392b25c..e37d0ea87b43 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/predicate/SortedRangeSet.java +++ b/core/trino-spi/src/main/java/io/trino/spi/predicate/SortedRangeSet.java @@ -223,10 +223,7 @@ private static SortedRangeSet fromUnorderedValuesBlock(Type type, Block block) return new SortedRangeSet( type, inclusive, - new DictionaryBlock( - dictionaryIndex, - block, - dictionary)); + DictionaryBlock.create(dictionaryIndex, block, dictionary)); } /** @@ -259,7 +256,7 @@ private static SortedRangeSet of(Type type, Object value) return new SortedRangeSet( type, new boolean[] {true, true}, - new RunLengthEncodedBlock(block, 2)); + RunLengthEncodedBlock.create(block, 2)); } static SortedRangeSet copyOf(Type type, Collection ranges) diff --git a/core/trino-spi/src/test/java/io/trino/spi/TestPage.java b/core/trino-spi/src/test/java/io/trino/spi/TestPage.java index 0fc473029e18..e41220a2318d 100644 --- a/core/trino-spi/src/test/java/io/trino/spi/TestPage.java +++ b/core/trino-spi/src/test/java/io/trino/spi/TestPage.java @@ -25,12 +25,12 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Verify.verifyNotNull; import static io.airlift.slice.SizeOf.sizeOf; +import static io.trino.spi.block.DictionaryBlock.createProjectedDictionaryBlock; import static io.trino.spi.block.DictionaryId.randomDictionaryId; import static io.trino.spi.type.BigintType.BIGINT; import static io.trino.spi.type.VarbinaryType.VARBINARY; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotEquals; import static org.testng.Assert.assertTrue; @@ -85,20 +85,20 @@ public void testCompactDictionaryBlocks() // 
first dictionary contains "varbinary" values Slice[] dictionaryValues1 = createExpectedValues(50); Block dictionary1 = createSlicesBlock(dictionaryValues1); - DictionaryBlock commonSourceIdBlock1 = new DictionaryBlock(positionCount, dictionary1, commonDictionaryIds, commonSourceId); + Block commonSourceIdBlock1 = createProjectedDictionaryBlock(positionCount, dictionary1, commonDictionaryIds, commonSourceId); // second dictionary block is "length(firstColumn)" BlockBuilder dictionary2 = BIGINT.createBlockBuilder(null, dictionary1.getPositionCount()); for (Slice expectedValue : dictionaryValues1) { BIGINT.writeLong(dictionary2, expectedValue.length()); } - DictionaryBlock commonSourceIdBlock2 = new DictionaryBlock(positionCount, dictionary2.build(), commonDictionaryIds, commonSourceId); + Block commonSourceIdBlock2 = createProjectedDictionaryBlock(positionCount, dictionary2.build(), commonDictionaryIds, commonSourceId); // Create block with a different source id, dictionary size, used int otherDictionaryUsedPositions = 30; int[] otherDictionaryIds = getDictionaryIds(positionCount, otherDictionaryUsedPositions); Block dictionary3 = createSlicesBlock(createExpectedValues(70)); - DictionaryBlock randomSourceIdBlock = new DictionaryBlock(dictionary3, otherDictionaryIds); + Block randomSourceIdBlock = DictionaryBlock.create(otherDictionaryIds.length, dictionary3, otherDictionaryIds); Page page = new Page(commonSourceIdBlock1, randomSourceIdBlock, commonSourceIdBlock2); page.compact(); @@ -116,20 +116,6 @@ public void testCompactDictionaryBlocks() assertEquals(((DictionaryBlock) page.getBlock(0)).getDictionarySourceId(), ((DictionaryBlock) page.getBlock(2)).getDictionarySourceId()); } - @Test - public void testCompactNestedDictionary() - { - Slice[] expectedValues = createExpectedValues(10); - Block valuesBlock = createSlicesBlock(expectedValues); - DictionaryBlock nestedDictionary = new DictionaryBlock(valuesBlock, new int[] {0, 1, 2, 2, 4, 5}); - DictionaryBlock 
dictionary = new DictionaryBlock(nestedDictionary, new int[] {2, 3, 2, 0}); - - Page page = new Page(dictionary); - page.compact(); - // Page#compact does not unnest nested dictionaries - assertFalse(((DictionaryBlock) page.getBlock(0)).isCompact()); - } - @Test public void testGetPositions() { diff --git a/core/trino-spi/src/test/java/io/trino/spi/block/TestArrayBlockBuilder.java b/core/trino-spi/src/test/java/io/trino/spi/block/TestArrayBlockBuilder.java index 9b69753502b2..0ccb0c0fe3a6 100644 --- a/core/trino-spi/src/test/java/io/trino/spi/block/TestArrayBlockBuilder.java +++ b/core/trino-spi/src/test/java/io/trino/spi/block/TestArrayBlockBuilder.java @@ -78,18 +78,18 @@ public void testConcurrentWriting() public void testBuilderProducesNullRleForNullRows() { // empty block - assertIsNullRle(blockBuilder().build(), 0); + assertIsAllNulls(blockBuilder().build(), 0); // single null - assertIsNullRle(blockBuilder().appendNull().build(), 1); + assertIsAllNulls(blockBuilder().appendNull().build(), 1); // multiple nulls - assertIsNullRle(blockBuilder().appendNull().appendNull().build(), 2); + assertIsAllNulls(blockBuilder().appendNull().appendNull().build(), 2); BlockBuilder blockBuilder = blockBuilder().appendNull().appendNull(); - assertIsNullRle(blockBuilder.copyPositions(new int[] {0}, 0, 1), 1); - assertIsNullRle(blockBuilder.getRegion(0, 1), 1); - assertIsNullRle(blockBuilder.copyRegion(0, 1), 1); + assertIsAllNulls(blockBuilder.copyPositions(new int[] {0}, 0, 1), 1); + assertIsAllNulls(blockBuilder.getRegion(0, 1), 1); + assertIsAllNulls(blockBuilder.copyRegion(0, 1), 1); } private static BlockBuilder blockBuilder() @@ -97,10 +97,16 @@ private static BlockBuilder blockBuilder() return new ArrayBlockBuilder(BIGINT, null, 10); } - private void assertIsNullRle(Block block, int expectedPositionCount) + private static void assertIsAllNulls(Block block, int expectedPositionCount) { assertEquals(block.getPositionCount(), expectedPositionCount); - 
assertEquals(block.getClass(), RunLengthEncodedBlock.class); + if (expectedPositionCount <= 1) { + assertEquals(block.getClass(), ArrayBlock.class); + } + else { + assertEquals(block.getClass(), RunLengthEncodedBlock.class); + assertEquals(((RunLengthEncodedBlock) block).getValue().getClass(), ArrayBlock.class); + } if (expectedPositionCount > 0) { assertTrue(block.isNull(0)); } diff --git a/core/trino-spi/src/test/java/io/trino/spi/block/TestBlockRetainedSizeBreakdown.java b/core/trino-spi/src/test/java/io/trino/spi/block/TestBlockRetainedSizeBreakdown.java index 0b861cc09fba..d44d78c50cf9 100644 --- a/core/trino-spi/src/test/java/io/trino/spi/block/TestBlockRetainedSizeBreakdown.java +++ b/core/trino-spi/src/test/java/io/trino/spi/block/TestBlockRetainedSizeBreakdown.java @@ -65,7 +65,7 @@ public void testDictionaryBlock() for (int i = 0; i < keyIds.length; i++) { keyIds[i] = i; } - checkRetainedSize(new DictionaryBlock(EXPECTED_ENTRIES, keyDictionaryBlock, keyIds), false); + checkRetainedSize(DictionaryBlock.create(EXPECTED_ENTRIES, keyDictionaryBlock, keyIds), false); } @Test @@ -89,7 +89,7 @@ public void testRunLengthEncodedBlock() { BlockBuilder blockBuilder = new LongArrayBlockBuilder(null, 1); writeEntries(1, blockBuilder, BIGINT); - checkRetainedSize(new RunLengthEncodedBlock(blockBuilder.build(), 1), false); + checkRetainedSize(RunLengthEncodedBlock.create(blockBuilder.build(), 1), false); } @Test diff --git a/core/trino-spi/src/test/java/io/trino/spi/block/TestDictionaryBlockEncoding.java b/core/trino-spi/src/test/java/io/trino/spi/block/TestDictionaryBlockEncoding.java index 58243b672bfb..772ffed647f0 100644 --- a/core/trino-spi/src/test/java/io/trino/spi/block/TestDictionaryBlockEncoding.java +++ b/core/trino-spi/src/test/java/io/trino/spi/block/TestDictionaryBlockEncoding.java @@ -37,7 +37,7 @@ public void testRoundTrip() ids[i] = i % 4; } - DictionaryBlock dictionaryBlock = new DictionaryBlock(dictionary, ids); + DictionaryBlock dictionaryBlock = 
(DictionaryBlock) DictionaryBlock.create(ids.length, dictionary, ids); Block actualBlock = roundTripBlock(dictionaryBlock); assertTrue(actualBlock instanceof DictionaryBlock); @@ -53,7 +53,7 @@ public void testRoundTrip() public void testNonSequentialDictionaryUnnest() { int[] ids = new int[] {3, 2, 1, 0}; - DictionaryBlock dictionaryBlock = new DictionaryBlock(dictionary, ids); + DictionaryBlock dictionaryBlock = (DictionaryBlock) DictionaryBlock.create(ids.length, dictionary, ids); Block actualBlock = roundTripBlock(dictionaryBlock); assertTrue(actualBlock instanceof DictionaryBlock); @@ -64,7 +64,7 @@ public void testNonSequentialDictionaryUnnest() public void testNonSequentialDictionaryUnnestWithGaps() { int[] ids = new int[] {3, 2, 0}; - DictionaryBlock dictionaryBlock = new DictionaryBlock(dictionary, ids); + DictionaryBlock dictionaryBlock = (DictionaryBlock) DictionaryBlock.create(ids.length, dictionary, ids); Block actualBlock = roundTripBlock(dictionaryBlock); assertTrue(actualBlock instanceof VariableWidthBlock); @@ -75,25 +75,13 @@ public void testNonSequentialDictionaryUnnestWithGaps() public void testSequentialDictionaryUnnest() { int[] ids = new int[] {0, 1, 2, 3}; - DictionaryBlock dictionaryBlock = new DictionaryBlock(dictionary, ids); + DictionaryBlock dictionaryBlock = (DictionaryBlock) DictionaryBlock.create(ids.length, dictionary, ids); Block actualBlock = roundTripBlock(dictionaryBlock); assertTrue(actualBlock instanceof VariableWidthBlock); assertBlockEquals(VARCHAR, actualBlock, dictionary.getPositions(ids, 0, 4)); } - @Test - public void testNestedSequentialDictionaryUnnest() - { - int[] ids = new int[] {0, 1, 2, 3}; - DictionaryBlock nestedDictionaryBlock = new DictionaryBlock(dictionary, ids); - DictionaryBlock dictionary = new DictionaryBlock(nestedDictionaryBlock, ids); - - Block actualBlock = roundTripBlock(dictionary); - assertTrue(actualBlock instanceof VariableWidthBlock); - assertBlockEquals(VARCHAR, actualBlock, 
this.dictionary.getPositions(ids, 0, 4)); - } - private Block roundTripBlock(Block block) { DynamicSliceOutput sliceOutput = new DynamicSliceOutput(1024); @@ -101,7 +89,7 @@ private Block roundTripBlock(Block block) return blockEncodingSerde.readBlock(sliceOutput.slice().getInput()); } - private Block buildTestDictionary() + private static Block buildTestDictionary() { // build dictionary BlockBuilder dictionaryBuilder = VARCHAR.createBlockBuilder(null, 4); diff --git a/core/trino-spi/src/test/java/io/trino/spi/block/TestLazyBlock.java b/core/trino-spi/src/test/java/io/trino/spi/block/TestLazyBlock.java index 1428f3c43c8c..7efd90de459e 100644 --- a/core/trino-spi/src/test/java/io/trino/spi/block/TestLazyBlock.java +++ b/core/trino-spi/src/test/java/io/trino/spi/block/TestLazyBlock.java @@ -20,6 +20,7 @@ import java.util.List; import java.util.Optional; +import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertTrue; @@ -68,14 +69,18 @@ public void testNestedGetLoadedBlock() List actualNotifications = new ArrayList<>(); Block arrayBlock = new IntArrayBlock(1, Optional.empty(), new int[] {0}); LazyBlock lazyArrayBlock = new LazyBlock(1, () -> arrayBlock); - DictionaryBlock dictionaryBlock = new DictionaryBlock(lazyArrayBlock, new int[] {0}); - LazyBlock lazyBlock = new LazyBlock(1, () -> dictionaryBlock); + Block dictionaryBlock = DictionaryBlock.create(2, lazyArrayBlock, new int[] {0, 0}); + LazyBlock lazyBlock = new LazyBlock(2, () -> dictionaryBlock); LazyBlock.listenForLoads(lazyBlock, actualNotifications::add); Block loadedBlock = lazyBlock.getBlock(); + assertThat(loadedBlock).isInstanceOf(DictionaryBlock.class); + assertThat(((DictionaryBlock) loadedBlock).getDictionary()).isInstanceOf(LazyBlock.class); assertEquals(actualNotifications, ImmutableList.of(loadedBlock)); - lazyBlock.getLoadedBlock(); + Block fullyLoadedBlock = 
lazyBlock.getLoadedBlock(); + assertThat(fullyLoadedBlock).isInstanceOf(DictionaryBlock.class); + assertThat(((DictionaryBlock) fullyLoadedBlock).getDictionary()).isInstanceOf(IntArrayBlock.class); assertEquals(actualNotifications, ImmutableList.of(loadedBlock, arrayBlock)); assertTrue(lazyBlock.isLoaded()); assertTrue(dictionaryBlock.isLoaded()); diff --git a/core/trino-spi/src/test/java/io/trino/spi/block/TestRowBlockBuilder.java b/core/trino-spi/src/test/java/io/trino/spi/block/TestRowBlockBuilder.java index 04cb14fd9723..7f7ec79e2e7c 100644 --- a/core/trino-spi/src/test/java/io/trino/spi/block/TestRowBlockBuilder.java +++ b/core/trino-spi/src/test/java/io/trino/spi/block/TestRowBlockBuilder.java @@ -26,18 +26,18 @@ public class TestRowBlockBuilder public void testBuilderProducesNullRleForNullRows() { // empty block - assertIsNullRle(blockBuilder().build(), 0); + assertIsAllNulls(blockBuilder().build(), 0); // single null - assertIsNullRle(blockBuilder().appendNull().build(), 1); + assertIsAllNulls(blockBuilder().appendNull().build(), 1); // multiple nulls - assertIsNullRle(blockBuilder().appendNull().appendNull().build(), 2); + assertIsAllNulls(blockBuilder().appendNull().appendNull().build(), 2); BlockBuilder blockBuilder = blockBuilder().appendNull().appendNull(); - assertIsNullRle(blockBuilder.copyPositions(new int[] {0}, 0, 1), 1); - assertIsNullRle(blockBuilder.getRegion(0, 1), 1); - assertIsNullRle(blockBuilder.copyRegion(0, 1), 1); + assertIsAllNulls(blockBuilder.copyPositions(new int[] {0}, 0, 1), 1); + assertIsAllNulls(blockBuilder.getRegion(0, 1), 1); + assertIsAllNulls(blockBuilder.copyRegion(0, 1), 1); } private static BlockBuilder blockBuilder() @@ -45,10 +45,16 @@ private static BlockBuilder blockBuilder() return new RowBlockBuilder(ImmutableList.of(BIGINT), null, 10); } - private void assertIsNullRle(Block block, int expectedPositionCount) + private static void assertIsAllNulls(Block block, int expectedPositionCount) { 
assertEquals(block.getPositionCount(), expectedPositionCount); - assertEquals(block.getClass(), RunLengthEncodedBlock.class); + if (expectedPositionCount <= 1) { + assertEquals(block.getClass(), RowBlock.class); + } + else { + assertEquals(block.getClass(), RunLengthEncodedBlock.class); + assertEquals(((RunLengthEncodedBlock) block).getValue().getClass(), RowBlock.class); + } if (expectedPositionCount > 0) { assertTrue(block.isNull(0)); } diff --git a/core/trino-spi/src/test/java/io/trino/spi/block/TestVariableWidthBlockBuilder.java b/core/trino-spi/src/test/java/io/trino/spi/block/TestVariableWidthBlockBuilder.java index acc67b90bb88..cef1e175afeb 100644 --- a/core/trino-spi/src/test/java/io/trino/spi/block/TestVariableWidthBlockBuilder.java +++ b/core/trino-spi/src/test/java/io/trino/spi/block/TestVariableWidthBlockBuilder.java @@ -74,18 +74,18 @@ private void testIsFull(PageBuilderStatus pageBuilderStatus) public void testBuilderProducesNullRleForNullRows() { // empty block - assertIsNullRle(blockBuilder().build(), 0); + assertIsAllNulls(blockBuilder().build(), 0); // single null - assertIsNullRle(blockBuilder().appendNull().build(), 1); + assertIsAllNulls(blockBuilder().appendNull().build(), 1); // multiple nulls - assertIsNullRle(blockBuilder().appendNull().appendNull().build(), 2); + assertIsAllNulls(blockBuilder().appendNull().appendNull().build(), 2); BlockBuilder blockBuilder = blockBuilder().appendNull().appendNull(); - assertIsNullRle(blockBuilder.copyPositions(new int[] {0}, 0, 1), 1); - assertIsNullRle(blockBuilder.getRegion(0, 1), 1); - assertIsNullRle(blockBuilder.copyRegion(0, 1), 1); + assertIsAllNulls(blockBuilder.copyPositions(new int[] {0}, 0, 1), 1); + assertIsAllNulls(blockBuilder.getRegion(0, 1), 1); + assertIsAllNulls(blockBuilder.copyRegion(0, 1), 1); } private static BlockBuilder blockBuilder() @@ -93,10 +93,16 @@ private static BlockBuilder blockBuilder() return new VariableWidthBlockBuilder(null, 10, 0); } - private void 
assertIsNullRle(Block block, int expectedPositionCount) + private static void assertIsAllNulls(Block block, int expectedPositionCount) { assertEquals(block.getPositionCount(), expectedPositionCount); - assertEquals(block.getClass(), RunLengthEncodedBlock.class); + if (expectedPositionCount <= 1) { + assertEquals(block.getClass(), VariableWidthBlock.class); + } + else { + assertEquals(block.getClass(), RunLengthEncodedBlock.class); + assertEquals(((RunLengthEncodedBlock) block).getValue().getClass(), VariableWidthBlock.class); + } if (expectedPositionCount > 0) { assertTrue(block.isNull(0)); } diff --git a/lib/trino-orc/src/main/java/io/trino/orc/reader/SliceDictionaryColumnReader.java b/lib/trino-orc/src/main/java/io/trino/orc/reader/SliceDictionaryColumnReader.java index d7d8caceee90..f4c1097adccf 100644 --- a/lib/trino-orc/src/main/java/io/trino/orc/reader/SliceDictionaryColumnReader.java +++ b/lib/trino-orc/src/main/java/io/trino/orc/reader/SliceDictionaryColumnReader.java @@ -163,9 +163,9 @@ else if (nullCount != nextBatchSize) { return block; } - private RunLengthEncodedBlock readAllNullsBlock() + private Block readAllNullsBlock() { - return new RunLengthEncodedBlock(new VariableWidthBlock(1, EMPTY_SLICE, new int[2], Optional.of(new boolean[] {true})), nextBatchSize); + return RunLengthEncodedBlock.create(new VariableWidthBlock(1, EMPTY_SLICE, new int[2], Optional.of(new boolean[] {true})), nextBatchSize); } private Block readNonNullBlock() @@ -174,7 +174,7 @@ private Block readNonNullBlock() verifyNotNull(dataStream); int[] values = new int[nextBatchSize]; dataStream.next(values, nextBatchSize); - return new DictionaryBlock(nextBatchSize, dictionaryBlock, values); + return DictionaryBlock.create(nextBatchSize, dictionaryBlock, values); } private Block readNullBlock(boolean[] isNull, int nonNullCount) @@ -205,7 +205,7 @@ private Block readNullBlock(boolean[] isNull, int nonNullCount) result[nonNullPositionList[i]] = nonNullValueTemp[i]; } - return new 
DictionaryBlock(nextBatchSize, dictionaryBlock, result); + return DictionaryBlock.create(nextBatchSize, dictionaryBlock, result); } private void setDictionaryBlockData(byte[] dictionaryData, int[] dictionaryOffsets, int positionCount) diff --git a/lib/trino-orc/src/main/java/io/trino/orc/reader/SliceDirectColumnReader.java b/lib/trino-orc/src/main/java/io/trino/orc/reader/SliceDirectColumnReader.java index afe258cbf976..25ecae34c636 100644 --- a/lib/trino-orc/src/main/java/io/trino/orc/reader/SliceDirectColumnReader.java +++ b/lib/trino-orc/src/main/java/io/trino/orc/reader/SliceDirectColumnReader.java @@ -230,9 +230,9 @@ public Block readBlock() return new VariableWidthBlock(currentBatchSize, slice, offsetVector, Optional.ofNullable(isNullVector)); } - private RunLengthEncodedBlock readAllNullsBlock() + private Block readAllNullsBlock() { - return new RunLengthEncodedBlock(new VariableWidthBlock(1, EMPTY_SLICE, new int[2], Optional.of(new boolean[] {true})), nextBatchSize); + return RunLengthEncodedBlock.create(new VariableWidthBlock(1, EMPTY_SLICE, new int[2], Optional.of(new boolean[] {true})), nextBatchSize); } private void openRowGroup() diff --git a/lib/trino-orc/src/main/java/io/trino/orc/reader/UnionColumnReader.java b/lib/trino-orc/src/main/java/io/trino/orc/reader/UnionColumnReader.java index 9a4adce2afe5..6d4ab7c2a654 100644 --- a/lib/trino-orc/src/main/java/io/trino/orc/reader/UnionColumnReader.java +++ b/lib/trino-orc/src/main/java/io/trino/orc/reader/UnionColumnReader.java @@ -259,7 +259,7 @@ private Block[] getBlocks(int positionCount) blocks[i + 1] = new LazyBlock(positionCount, new UnpackLazyBlockLoader(rawBlock, fieldType, valueIsNonNull[i])); } else { - blocks[i + 1] = new RunLengthEncodedBlock( + blocks[i + 1] = RunLengthEncodedBlock.create( fieldType.createBlockBuilder(null, 1).appendNull().build(), positionCount); } diff --git a/lib/trino-orc/src/main/java/io/trino/orc/reader/UuidColumnReader.java 
b/lib/trino-orc/src/main/java/io/trino/orc/reader/UuidColumnReader.java index 18cda4c49851..ba14b28bb1a5 100644 --- a/lib/trino-orc/src/main/java/io/trino/orc/reader/UuidColumnReader.java +++ b/lib/trino-orc/src/main/java/io/trino/orc/reader/UuidColumnReader.java @@ -255,9 +255,9 @@ private long[] readNonNullLongs(int valueCount) return values; } - private RunLengthEncodedBlock createAllNullsBlock() + private Block createAllNullsBlock() { - return new RunLengthEncodedBlock(new Int128ArrayBlock(1, Optional.of(new boolean[] {true}), new long[2]), nextBatchSize); + return RunLengthEncodedBlock.create(new Int128ArrayBlock(1, Optional.of(new boolean[] {true}), new long[2]), nextBatchSize); } private void openRowGroup() diff --git a/lib/trino-orc/src/main/java/io/trino/orc/writer/SliceDictionaryColumnWriter.java b/lib/trino-orc/src/main/java/io/trino/orc/writer/SliceDictionaryColumnWriter.java index 1ebc555bd904..b91731be3952 100644 --- a/lib/trino-orc/src/main/java/io/trino/orc/writer/SliceDictionaryColumnWriter.java +++ b/lib/trino-orc/src/main/java/io/trino/orc/writer/SliceDictionaryColumnWriter.java @@ -216,7 +216,7 @@ private boolean writeDictionaryRowGroup(Block dictionary, int valueCount, IntBig for (int i = 0; valueCount > 0 && i < segments.length; i++) { int[] segment = segments[i]; int positionCount = Math.min(valueCount, segment.length); - Block block = new DictionaryBlock(positionCount, dictionary, segment); + Block block = DictionaryBlock.create(positionCount, dictionary, segment); while (block != null) { int chunkPositionCount = block.getPositionCount(); diff --git a/lib/trino-rcfile/src/main/java/io/trino/rcfile/RcFileReader.java b/lib/trino-rcfile/src/main/java/io/trino/rcfile/RcFileReader.java index 65ad85861341..7ab8849fb06b 100644 --- a/lib/trino-rcfile/src/main/java/io/trino/rcfile/RcFileReader.java +++ b/lib/trino-rcfile/src/main/java/io/trino/rcfile/RcFileReader.java @@ -428,7 +428,7 @@ public Block readBlock(int columnIndex) if (columnIndex >= 
columns.length) { Type type = readColumns.get(columnIndex); Block nullBlock = type.createBlockBuilder(null, 1, 0).appendNull().build(); - return new RunLengthEncodedBlock(nullBlock, currentChunkRowCount); + return RunLengthEncodedBlock.create(nullBlock, currentChunkRowCount); } return columns[columnIndex].readBlock(rowGroupPosition, currentChunkRowCount); diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSource.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSource.java index ac66f8564232..ed881f55d929 100644 --- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSource.java +++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSource.java @@ -165,7 +165,7 @@ public Page getNextPage() Block[] blocks = new Block[prefilledBlocks.length]; for (int i = 0; i < prefilledBlocks.length; i++) { if (prefilledBlocks[i] != null) { - blocks[i] = new RunLengthEncodedBlock(prefilledBlocks[i], batchSize); + blocks[i] = RunLengthEncodedBlock.create(prefilledBlocks[i], batchSize); } else if (i == rowIdIndex) { blocks[i] = createRowIdBlock(dataPage.getBlock(delegateIndexes[i])); @@ -187,9 +187,9 @@ private Block createRowIdBlock(Block rowIndexBlock) { int positions = rowIndexBlock.getPositionCount(); Block[] fields = { - new RunLengthEncodedBlock(pathBlock, positions), + RunLengthEncodedBlock.create(pathBlock, positions), rowIndexBlock, - new RunLengthEncodedBlock(partitionsBlock, positions), + RunLengthEncodedBlock.create(partitionsBlock, positions), }; return RowBlock.fromFieldBlocks(positions, Optional.empty(), fields); } diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSpatialPartitioningInternalAggregation.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSpatialPartitioningInternalAggregation.java index 798053d819fc..5777933ce193 100644 --- 
a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSpatialPartitioningInternalAggregation.java +++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSpatialPartitioningInternalAggregation.java @@ -73,7 +73,7 @@ public void test(int partitionCount) List geometries = makeGeometries(); Block geometryBlock = makeGeometryBlock(geometries); - Block partitionCountBlock = BlockAssertions.createRLEBlock(partitionCount, geometries.size()); + Block partitionCountBlock = BlockAssertions.createRepeatedValuesBlock(partitionCount, geometries.size()); Rectangle expectedExtent = new Rectangle(-10, -10, Math.nextUp(10.0), Math.nextUp(10.0)); String expectedValue = getSpatialPartitioning(expectedExtent, geometries, partitionCount); diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/AbstractHiveAcidWriters.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/AbstractHiveAcidWriters.java index 4f00581f8eb2..ba306973ce34 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/AbstractHiveAcidWriters.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/AbstractHiveAcidWriters.java @@ -152,12 +152,12 @@ public static Page buildDeletePage(Block rowIdsRowBlock, long writeId, Block row int positionCount = rowIdsRowBlock.getPositionCount(); // We've verified that the rowIds block has no null rows, so it's okay to get the field blocks Block[] blockArray = { - new RunLengthEncodedBlock(DELETE_OPERATION_BLOCK, positionCount), + RunLengthEncodedBlock.create(DELETE_OPERATION_BLOCK, positionCount), columnarRow.getField(ORIGINAL_TRANSACTION_CHANNEL), columnarRow.getField(BUCKET_CHANNEL), columnarRow.getField(ROW_ID_CHANNEL), RunLengthEncodedBlock.create(BIGINT, writeId, positionCount), - new RunLengthEncodedBlock(rowTypeNullsBlock, positionCount), + RunLengthEncodedBlock.create(rowTypeNullsBlock, positionCount), }; return new Page(blockArray); } diff --git 
a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSource.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSource.java index 84d036c9e1fb..835c329b224b 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSource.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSource.java @@ -482,7 +482,7 @@ else if (i < rowBlock.getFieldCount()) { fields[i] = rowBlock.getField(i); } else { - fields[i] = new DictionaryBlock(nullBlocks[i], ids); + fields[i] = DictionaryBlock.create(ids.length, nullBlocks[i], ids); } } boolean[] valueIsNull = null; diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveUpdatablePageSource.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveUpdatablePageSource.java index 955f2e3c2cb3..74af4f9cb68d 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveUpdatablePageSource.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveUpdatablePageSource.java @@ -131,12 +131,12 @@ private void deleteRowsInternal(ColumnarRow columnarRow) Block originalTransactionChannel = columnarRow.getField(ORIGINAL_TRANSACTION_CHANNEL); Block[] blockArray = { - new RunLengthEncodedBlock(DELETE_OPERATION_BLOCK, positionCount), + RunLengthEncodedBlock.create(DELETE_OPERATION_BLOCK, positionCount), originalTransactionChannel, columnarRow.getField(BUCKET_CHANNEL), columnarRow.getField(ROW_ID_CHANNEL), RunLengthEncodedBlock.create(BIGINT, writeId, positionCount), - new RunLengthEncodedBlock(hiveRowTypeNullsBlock, positionCount), + RunLengthEncodedBlock.create(hiveRowTypeNullsBlock, positionCount), }; Page deletePage = new Page(blockArray); @@ -165,7 +165,7 @@ public void updateRows(Page page, List columnValueAndRowIdChannels) Block currentTransactionBlock = RunLengthEncodedBlock.create(BIGINT, writeId, positionCount); Block[] blockArray = { - new RunLengthEncodedBlock(INSERT_OPERATION_BLOCK, positionCount), + 
RunLengthEncodedBlock.create(INSERT_OPERATION_BLOCK, positionCount), currentTransactionBlock, acidBlock.getField(BUCKET_CHANNEL), createRowIdBlock(positionCount), diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/MergeFileWriter.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/MergeFileWriter.java index 332cb50d890f..12b88c9240fc 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/MergeFileWriter.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/MergeFileWriter.java @@ -110,9 +110,9 @@ public static Page buildInsertPage(Page insertPage, long writeId, List originalFilesBlockBuilder = ImmutableList.builder(); originalFilesBlockBuilder.add( - new RunLengthEncodedBlock(ORIGINAL_FILE_TRANSACTION_ID_BLOCK, positionCount), - new RunLengthEncodedBlock(bucketBlock, positionCount), + RunLengthEncodedBlock.create(ORIGINAL_FILE_TRANSACTION_ID_BLOCK, positionCount), + RunLengthEncodedBlock.create(bucketBlock, positionCount), createRowNumberBlock(startingRowId, filePosition, positionCount)); for (int channel = 0; channel < sourcePage.getChannelCount(); channel++) { originalFilesBlockBuilder.add(sourcePage.getBlock(channel)); @@ -524,8 +524,8 @@ public Block block(Page sourcePage, MaskDeletedRowsFunction maskDeletedRowsFunct positionCount, Optional.empty(), new Block[] { - new RunLengthEncodedBlock(ORIGINAL_FILE_TRANSACTION_ID_BLOCK, positionCount), - new RunLengthEncodedBlock(bucketBlock, positionCount), + RunLengthEncodedBlock.create(ORIGINAL_FILE_TRANSACTION_ID_BLOCK, positionCount), + RunLengthEncodedBlock.create(bucketBlock, positionCount), createRowNumberBlock(startingRowId, filePosition, positionCount) })); } @@ -551,8 +551,8 @@ public Block block(Page sourcePage, MaskDeletedRowsFunction maskDeletedRowsFunct positionCount, Optional.empty(), new Block[] { - new RunLengthEncodedBlock(ORIGINAL_FILE_TRANSACTION_ID_BLOCK, positionCount), - new RunLengthEncodedBlock(bucketBlock, positionCount), + 
RunLengthEncodedBlock.create(ORIGINAL_FILE_TRANSACTION_ID_BLOCK, positionCount), + RunLengthEncodedBlock.create(bucketBlock, positionCount), createRowNumberBlock(startingRowId, filePosition, positionCount) })); return rowBlock; @@ -574,7 +574,7 @@ public ConstantAdaptation(Block singleValueBlock) @Override public Block block(Page sourcePage, MaskDeletedRowsFunction maskDeletedRowsFunction, long filePosition, OptionalLong startRowId) { - return new RunLengthEncodedBlock(singleValueBlock, sourcePage.getPositionCount()); + return RunLengthEncodedBlock.create(singleValueBlock, sourcePage.getPositionCount()); } } diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/ParquetFileWriter.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/ParquetFileWriter.java index 67ab1d3f4d36..ca5511ded8aa 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/ParquetFileWriter.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/ParquetFileWriter.java @@ -121,7 +121,7 @@ public void appendRows(Page dataPage) for (int i = 0; i < fileInputColumnIndexes.length; i++) { int inputColumnIndex = fileInputColumnIndexes[i]; if (inputColumnIndex < 0) { - blocks[i] = new RunLengthEncodedBlock(nullBlocks.get(i), dataPage.getPositionCount()); + blocks[i] = RunLengthEncodedBlock.create(nullBlocks.get(i), dataPage.getPositionCount()); } else { blocks[i] = dataPage.getBlock(inputColumnIndex); diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rcfile/RcFilePageSource.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rcfile/RcFilePageSource.java index 7f05ea64bf3e..918ff1942236 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rcfile/RcFilePageSource.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rcfile/RcFilePageSource.java @@ -129,7 +129,7 @@ public Page getNextPage() Block[] blocks = new Block[hiveColumnIndexes.length]; for (int fieldId = 0; fieldId < blocks.length; 
fieldId++) { if (constantBlocks[fieldId] != null) { - blocks[fieldId] = new RunLengthEncodedBlock(constantBlocks[fieldId], currentPageSize); + blocks[fieldId] = RunLengthEncodedBlock.create(constantBlocks[fieldId], currentPageSize); } else { blocks[fieldId] = createBlock(currentPageSize, fieldId); diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/orc/TestOrcDeletedRows.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/orc/TestOrcDeletedRows.java index 2e0159685b64..715f219449af 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/orc/TestOrcDeletedRows.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/orc/TestOrcDeletedRows.java @@ -190,7 +190,7 @@ private Page createTestPage(int originalTransactionStart, int originalTransactio return new Page( size, originalTransaction.build(), - new RunLengthEncodedBlock(bucketBlock, size), - new RunLengthEncodedBlock(rowIdBlock, size)); + RunLengthEncodedBlock.create(bucketBlock, size), + RunLengthEncodedBlock.create(rowIdBlock, size)); } } diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/ConstantPopulatingPageSource.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/ConstantPopulatingPageSource.java index fb209af0fa7d..317fad673fc8 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/ConstantPopulatingPageSource.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/ConstantPopulatingPageSource.java @@ -80,7 +80,7 @@ public Page getNextPage() for (int targetChannel = 0; targetChannel < size; targetChannel++) { Block constantValue = constantColumns[targetChannel]; if (constantValue != null) { - blocks[targetChannel] = new RunLengthEncodedBlock(constantValue, delegatePage.getPositionCount()); + blocks[targetChannel] = RunLengthEncodedBlock.create(constantValue, delegatePage.getPositionCount()); } else { blocks[targetChannel] = delegatePage.getBlock(targetChannelToSourceChannel[targetChannel]); diff --git 
a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/PartitionTransforms.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/PartitionTransforms.java index 4f6e1ee5a234..57c2b75be741 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/PartitionTransforms.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/PartitionTransforms.java @@ -689,7 +689,7 @@ private static ColumnTransform voidTransform(Type type) type, true, true, - block -> new RunLengthEncodedBlock(nullBlock, block.getPositionCount()), + block -> RunLengthEncodedBlock.create(nullBlock, block.getPositionCount()), (block, position) -> null); } diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/delete/IcebergPositionDeletePageSink.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/delete/IcebergPositionDeletePageSink.java index 60e78391dd98..2063b9a6b101 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/delete/IcebergPositionDeletePageSink.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/delete/IcebergPositionDeletePageSink.java @@ -111,7 +111,7 @@ public CompletableFuture appendPage(Page page) checkArgument(page.getChannelCount() == 1, "IcebergPositionDeletePageSink expected a Page with only one channel, but got " + page.getChannelCount()); Block[] blocks = new Block[2]; - blocks[0] = new RunLengthEncodedBlock(nativeValueToBlock(VARCHAR, utf8Slice(dataFilePath)), page.getPositionCount()); + blocks[0] = RunLengthEncodedBlock.create(nativeValueToBlock(VARCHAR, utf8Slice(dataFilePath)), page.getPositionCount()); blocks[1] = page.getBlock(0); writer.appendRows(new Page(blocks)); diff --git a/plugin/trino-raptor-legacy/src/main/java/io/trino/plugin/raptor/legacy/storage/RaptorPageSource.java b/plugin/trino-raptor-legacy/src/main/java/io/trino/plugin/raptor/legacy/storage/RaptorPageSource.java index 1c0131167f6a..0386d1beedd0 100644 --- 
a/plugin/trino-raptor-legacy/src/main/java/io/trino/plugin/raptor/legacy/storage/RaptorPageSource.java +++ b/plugin/trino-raptor-legacy/src/main/java/io/trino/plugin/raptor/legacy/storage/RaptorPageSource.java @@ -233,7 +233,7 @@ public ShardUuidAdaptation(UUID shardUuid) @Override public Block block(Page sourcePage, long filePosition) { - return new RunLengthEncodedBlock(shardUuidBlock, sourcePage.getPositionCount()); + return RunLengthEncodedBlock.create(shardUuidBlock, sourcePage.getPositionCount()); } @Override @@ -288,8 +288,8 @@ public MergeRowIdColumn(OptionalInt bucketNumber, UUID shardUuid) @Override public Block block(Page sourcePage, long filePosition) { - Block bucketNumberBlock = new RunLengthEncodedBlock(bucketNumberValue, sourcePage.getPositionCount()); - Block shardUuidBlock = new RunLengthEncodedBlock(shardUuidValue, sourcePage.getPositionCount()); + Block bucketNumberBlock = RunLengthEncodedBlock.create(bucketNumberValue, sourcePage.getPositionCount()); + Block shardUuidBlock = RunLengthEncodedBlock.create(shardUuidValue, sourcePage.getPositionCount()); Block rowIdBlock = RowIdColumn.INSTANCE.block(sourcePage, filePosition); return RowBlock.fromFieldBlocks( sourcePage.getPositionCount(), @@ -315,7 +315,7 @@ public NullColumn(Type type) @Override public Block block(Page sourcePage, long filePosition) { - return new RunLengthEncodedBlock(nullBlock, sourcePage.getPositionCount()); + return RunLengthEncodedBlock.create(nullBlock, sourcePage.getPositionCount()); } @Override @@ -342,7 +342,7 @@ public BucketNumberColumn(int bucketNumber) @Override public Block block(Page sourcePage, long filePosition) { - return new RunLengthEncodedBlock(bucketNumberBlock, sourcePage.getPositionCount()); + return RunLengthEncodedBlock.create(bucketNumberBlock, sourcePage.getPositionCount()); } @Override