Formatting fixes #20330

Merged (2 commits) on Jan 11, 2024
Changes from all commits
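The hunks below apply two mechanical style fixes: a single space is inserted between an array type and its initializer brace, and single-statement lambda bodies are collapsed into expression lambdas. As a rough illustration of the convention (the class and field names here are made up for this example, not taken from the PR):

import java.util.function.Supplier;

class FormattingConventionExample
{
    // Array initializers: the new style separates the array type from the brace.
    int[] oldArrayStyle = new int[]{1, 2, 3};     // before
    int[] newArrayStyle = new int[] {1, 2, 3};    // after

    // Lambdas: a block body containing a single statement becomes an expression lambda.
    Supplier<String> oldLambdaStyle = () -> { return "value"; };   // before
    Supplier<String> newLambdaStyle = () -> "value";               // after
}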
@@ -342,7 +342,7 @@ private void rehash(int minimumRequiredCapacity)

// we incrementally allocate the record groups to smooth out memory allocation
if (capacity <= RECORDS_PER_GROUP) {
-recordGroups = new byte[][]{new byte[multiplyExact(capacity, recordSize)]};
+recordGroups = new byte[][] {new byte[multiplyExact(capacity, recordSize)]};
}
else {
recordGroups = new byte[(capacity + 1) >> RECORDS_PER_GROUP_SHIFT][];
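For context, the createRecordGroups pattern touched in this and the next few files splits flat record storage into fixed-size groups rather than one contiguous array, which is what the comment above about smoothing out memory allocation refers to. A simplified sketch of the grouping arithmetic, with an assumed group size and eager allocation for brevity (the real classes define their own constants and may fill in groups more incrementally):

import static java.lang.Math.multiplyExact;

final class RecordGroupsSketch
{
    // Assumed values for illustration; the actual constants live in the Trino classes being edited.
    private static final int RECORDS_PER_GROUP_SHIFT = 10;
    private static final int RECORDS_PER_GROUP = 1 << RECORDS_PER_GROUP_SHIFT;

    static byte[][] createRecordGroups(int capacity, int recordSize)
    {
        // A small table fits in a single group sized exactly to its capacity.
        if (capacity < RECORDS_PER_GROUP) {
            return new byte[][] {new byte[multiplyExact(capacity, recordSize)]};
        }
        // Larger tables get one fixed-size group per RECORDS_PER_GROUP records, so memory
        // arrives as many moderate allocations instead of one very large array.
        byte[][] groups = new byte[(capacity + 1) >> RECORDS_PER_GROUP_SHIFT][];
        for (int i = 0; i < groups.length; i++) {
            groups[i] = new byte[multiplyExact(RECORDS_PER_GROUP, recordSize)];
        }
        return groups;
    }
}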

@@ -99,7 +99,7 @@ public FlatSet(
private static byte[][] createRecordGroups(int capacity, int recordSize)
{
if (capacity < RECORDS_PER_GROUP) {
-return new byte[][]{new byte[multiplyExact(capacity, recordSize)]};
+return new byte[][] {new byte[multiplyExact(capacity, recordSize)]};
}

byte[][] groups = new byte[(capacity + 1) >> RECORDS_PER_GROUP_SHIFT][];

@@ -186,7 +186,7 @@ public AbstractMapAggregationState(AbstractMapAggregationState state)
private static byte[][] createRecordGroups(int capacity, int recordSize)
{
if (capacity < RECORDS_PER_GROUP) {
-return new byte[][]{new byte[multiplyExact(capacity, recordSize)]};
+return new byte[][] {new byte[multiplyExact(capacity, recordSize)]};
}

byte[][] groups = new byte[(capacity + 1) >> RECORDS_PER_GROUP_SHIFT][];

@@ -139,7 +139,7 @@ public TypedHistogram(
private static byte[][] createRecordGroups(int capacity, int recordSize)
{
if (capacity < RECORDS_PER_GROUP) {
-return new byte[][]{new byte[multiplyExact(capacity, recordSize)]};
+return new byte[][] {new byte[multiplyExact(capacity, recordSize)]};
}

byte[][] groups = new byte[(capacity + 1) >> RECORDS_PER_GROUP_SHIFT][];

@@ -191,7 +191,7 @@ public AbstractMultimapAggregationState(AbstractMultimapAggregationState state)
private static byte[][] createRecordGroups(int capacity, int recordSize)
{
if (capacity < RECORDS_PER_GROUP) {
-return new byte[][]{new byte[multiplyExact(capacity, recordSize)]};
+return new byte[][] {new byte[multiplyExact(capacity, recordSize)]};
}

byte[][] groups = new byte[(capacity + 1) >> RECORDS_PER_GROUP_SHIFT][];

@@ -88,7 +88,7 @@ private static final class Visitor
extends AstVisitor<Node, Void>
{
private static final Query EMPTY_INPUT = createDesctibeInputQuery(
-new Row[]{row(
+new Row[] {row(
new Cast(new NullLiteral(), toSqlType(BIGINT)),
new Cast(new NullLiteral(), toSqlType(VARCHAR)))},
Optional.of(new Limit(new LongLiteral("0"))));

@@ -88,7 +88,7 @@ private static final class Visitor
extends AstVisitor<Node, Void>
{
private static final Query EMPTY_OUTPUT = createDesctibeOutputQuery(
-new Row[]{row(
+new Row[] {row(
new Cast(new NullLiteral(), toSqlType(VARCHAR)),
new Cast(new NullLiteral(), toSqlType(VARCHAR)),
new Cast(new NullLiteral(), toSqlType(VARCHAR)),

@@ -69,9 +69,11 @@ public void testFromFieldBlocksNoNullsDetection()
{
// Blocks does not discard the null mask during creation if no values are null
boolean[] rowIsNull = new boolean[5];
-assertThat(fromNotNullSuppressedFieldBlocks(5, Optional.of(rowIsNull), new Block[] {new ByteArrayBlock(5, Optional.empty(), createExpectedValue(5).getBytes())}).mayHaveNull()).isTrue();
+assertThat(fromNotNullSuppressedFieldBlocks(5, Optional.of(rowIsNull), new Block[] {
+new ByteArrayBlock(5, Optional.empty(), createExpectedValue(5).getBytes())}).mayHaveNull()).isTrue();
rowIsNull[rowIsNull.length - 1] = true;
-assertThat(fromNotNullSuppressedFieldBlocks(5, Optional.of(rowIsNull), new Block[] {new ByteArrayBlock(5, Optional.of(rowIsNull), createExpectedValue(5).getBytes())}).mayHaveNull()).isTrue();
+assertThat(fromNotNullSuppressedFieldBlocks(5, Optional.of(rowIsNull), new Block[] {
+new ByteArrayBlock(5, Optional.of(rowIsNull), createExpectedValue(5).getBytes())}).mayHaveNull()).isTrue();

// Empty blocks have no nulls and can also discard their null mask
assertThat(fromNotNullSuppressedFieldBlocks(0, Optional.of(new boolean[0]), new Block[] {new ByteArrayBlock(0, Optional.empty(), new byte[0])}).mayHaveNull()).isFalse();
@@ -101,7 +103,7 @@ public void testCompactBlock()

// NOTE: nested row blocks are required to have the exact same size so they are always compact
assertCompact(fromFieldBlocks(0, new Block[] {emptyBlock, emptyBlock}));
-assertCompact(fromNotNullSuppressedFieldBlocks(rowIsNull.length, Optional.of(rowIsNull), new Block[]{
+assertCompact(fromNotNullSuppressedFieldBlocks(rowIsNull.length, Optional.of(rowIsNull), new Block[] {
new ByteArrayBlock(6, Optional.of(rowIsNull), createExpectedValue(6).getBytes()),
new ByteArrayBlock(6, Optional.of(rowIsNull), createExpectedValue(6).getBytes())}));
}

@@ -37,7 +37,7 @@ public class TestSimplePagesHashStrategy
@Test
public void testHashRowWithIntegerType()
{
-Block block = new IntArrayBlock(1, Optional.empty(), new int[]{1234});
+Block block = new IntArrayBlock(1, Optional.empty(), new int[] {1234});
SimplePagesHashStrategy strategy = createSimplePagesHashStrategy(INTEGER, ImmutableList.of(block));
Page page = new Page(block);

@@ -51,9 +51,9 @@ public void testHashRowWithMapType()
MapType mapType = new MapType(INTEGER, INTEGER, new TypeOperators());
Block block = mapType.createBlockFromKeyValue(
Optional.empty(),
-new int[]{0, 1},
-new IntArrayBlock(1, Optional.empty(), new int[]{1234}),
-new IntArrayBlock(1, Optional.empty(), new int[]{5678}));
+new int[] {0, 1},
+new IntArrayBlock(1, Optional.empty(), new int[] {1234}),
+new IntArrayBlock(1, Optional.empty(), new int[] {5678}));

SimplePagesHashStrategy strategy = createSimplePagesHashStrategy(mapType, ImmutableList.of(block));
Page page = new Page(block);
@@ -67,9 +67,9 @@ public void testRowEqualsRowWithIntegerType()
{
SimplePagesHashStrategy strategy = createSimplePagesHashStrategy(INTEGER, ImmutableList.of());

-Page leftPage = new Page(new IntArrayBlock(1, Optional.empty(), new int[]{1234}));
-Page rightPage1 = new Page(new IntArrayBlock(1, Optional.empty(), new int[]{1234}));
-Page rightPage2 = new Page(new IntArrayBlock(1, Optional.empty(), new int[]{5678}));
+Page leftPage = new Page(new IntArrayBlock(1, Optional.empty(), new int[] {1234}));
+Page rightPage1 = new Page(new IntArrayBlock(1, Optional.empty(), new int[] {1234}));
+Page rightPage2 = new Page(new IntArrayBlock(1, Optional.empty(), new int[] {5678}));

// This works because IntegerType is comparable.
assertThat(strategy.rowEqualsRow(0, leftPage, 0, rightPage1)).isTrue();
@@ -84,21 +84,21 @@ public void testRowEqualsRowWithMapType()

Page leftPage = new Page(mapType.createBlockFromKeyValue(
Optional.empty(),
-new int[]{0, 1},
-new IntArrayBlock(1, Optional.empty(), new int[]{1234}),
-new IntArrayBlock(1, Optional.empty(), new int[]{5678})));
+new int[] {0, 1},
+new IntArrayBlock(1, Optional.empty(), new int[] {1234}),
+new IntArrayBlock(1, Optional.empty(), new int[] {5678})));

Page rightPage1 = new Page(mapType.createBlockFromKeyValue(
Optional.empty(),
-new int[]{0, 1},
-new IntArrayBlock(1, Optional.empty(), new int[]{1234}),
-new IntArrayBlock(1, Optional.empty(), new int[]{5678})));
+new int[] {0, 1},
+new IntArrayBlock(1, Optional.empty(), new int[] {1234}),
+new IntArrayBlock(1, Optional.empty(), new int[] {5678})));

Page rightPage2 = new Page(mapType.createBlockFromKeyValue(
Optional.empty(),
-new int[]{0, 1},
-new IntArrayBlock(1, Optional.empty(), new int[]{1234}),
-new IntArrayBlock(1, Optional.empty(), new int[]{1234})));
+new int[] {0, 1},
+new IntArrayBlock(1, Optional.empty(), new int[] {1234}),
+new IntArrayBlock(1, Optional.empty(), new int[] {1234})));

// This works because MapType is comparable.
assertThat(strategy.rowEqualsRow(0, leftPage, 0, rightPage1)).isTrue();
@@ -108,7 +108,7 @@ public void testRowEqualsRowWithMapType()
@Test
public void testCompareSortChannelPositionsWithIntegerType()
{
-Block block = new IntArrayBlock(3, Optional.empty(), new int[]{1234, 5678, 1234});
+Block block = new IntArrayBlock(3, Optional.empty(), new int[] {1234, 5678, 1234});
SimplePagesHashStrategy strategy = createSimplePagesHashStrategy(INTEGER, ImmutableList.of(block));

// This works because IntegerType is orderable.
@@ -123,9 +123,9 @@ public void testCompareSortChannelPositionsWithMapType()
MapType mapType = new MapType(INTEGER, INTEGER, new TypeOperators());
Block block = mapType.createBlockFromKeyValue(
Optional.empty(),
-new int[]{0, 1},
-new IntArrayBlock(1, Optional.empty(), new int[]{1234}),
-new IntArrayBlock(1, Optional.empty(), new int[]{5678}));
+new int[] {0, 1},
+new IntArrayBlock(1, Optional.empty(), new int[] {1234}),
+new IntArrayBlock(1, Optional.empty(), new int[] {5678}));

SimplePagesHashStrategy strategy = createSimplePagesHashStrategy(mapType, ImmutableList.of(block));


@@ -1448,7 +1448,7 @@ private static void assertPartitionedRemovePage(LocalExchangeSource source, int
Page page = source.removePage();
assertThat(page).isNotNull();

-LocalPartitionGenerator partitionGenerator = new LocalPartitionGenerator(createChannelsHashGenerator(TYPES, new int[]{0}, TYPE_OPERATORS), partitionCount);
+LocalPartitionGenerator partitionGenerator = new LocalPartitionGenerator(createChannelsHashGenerator(TYPES, new int[] {0}, TYPE_OPERATORS), partitionCount);
for (int position = 0; position < page.getPositionCount(); position++) {
assertThat(partitionGenerator.getPartition(page, position)).isEqualTo(partition);
}

@@ -50,7 +50,7 @@ public void testFullOnPositionCountLimit()
Block rleBlock = RunLengthEncodedBlock.create(VARCHAR, Slices.utf8Slice("test"), 10);
Page inputPage = new Page(rleBlock);

-IntArrayList positions = IntArrayList.wrap(new int[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
+IntArrayList positions = IntArrayList.wrap(new int[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
// Append 32760 positions, just less than MAX_POSITION_COUNT
assertEquals(32768, PositionsAppenderPageBuilder.MAX_POSITION_COUNT, "expected MAX_POSITION_COUNT to be 32768");
for (int i = 0; i < 3276; i++) {
@@ -85,7 +85,7 @@ public void testFullOnDirectSizeInBytes()
Block rleBlock = RunLengthEncodedBlock.create(VARCHAR, Slices.utf8Slice("test"), 10);
Page inputPage = new Page(rleBlock);

-IntArrayList positions = IntArrayList.wrap(new int[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
+IntArrayList positions = IntArrayList.wrap(new int[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
pageBuilder.appendToOutputPartition(inputPage, positions);
// 10 positions inserted, size in bytes is still the same since we're in RLE mode but direct size is 10x
sizeAccumulator = pageBuilder.computeAppenderSizes();
@@ -124,7 +124,7 @@ public void testFlushUsefulDictionariesOnRelease()
Block dictionaryBlock = DictionaryBlock.create(10, valueBlock, new int[10]);
Page inputPage = new Page(dictionaryBlock);

-pageBuilder.appendToOutputPartition(inputPage, IntArrayList.wrap(new int[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}));
+pageBuilder.appendToOutputPartition(inputPage, IntArrayList.wrap(new int[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}));
// Dictionary mode appender should report the size of the ID's, but doesn't currently track
// the per-position size at all because it would be inefficient
assertEquals(Integer.BYTES * 10, pageBuilder.getSizeInBytes());
@@ -140,7 +140,7 @@ public void testFlattenUnhelpfulDictionariesOnRelease()
{
// Create unhelpful dictionary wrapping
Block valueBlock = createRandomBlockForType(VARCHAR, 10, 0.25f);
-Block dictionaryBlock = DictionaryBlock.create(10, valueBlock, new int[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
+Block dictionaryBlock = DictionaryBlock.create(10, valueBlock, new int[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
Page inputPage = new Page(dictionaryBlock);

// Ensure the builder allows the entire value block to be inserted without being full
@@ -152,7 +152,7 @@ public void testFlattenUnhelpfulDictionariesOnRelease()
List.of(VARCHAR),
new PositionsAppenderFactory(new BlockTypeOperators()));

-pageBuilder.appendToOutputPartition(inputPage, IntArrayList.wrap(new int[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}));
+pageBuilder.appendToOutputPartition(inputPage, IntArrayList.wrap(new int[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}));
assertEquals(Integer.BYTES * 10, pageBuilder.getSizeInBytes());
assertFalse(pageBuilder.isFull());


@@ -218,7 +218,7 @@ public void testCacheInvalidatedOnBadDisk()
// Set second spiller path to read-only after initialization to emulate a disk failing during runtime
setPosixFilePermissions(spillPath2.toPath(), ImmutableSet.of(PosixFilePermission.OWNER_READ));

-assertThatThrownBy(() -> { getUnchecked(singleStreamSpiller2.spill(page)); })
+assertThatThrownBy(() -> getUnchecked(singleStreamSpiller2.spill(page)))
.isInstanceOf(com.google.common.util.concurrent.UncheckedExecutionException.class)
.hasMessageContaining("Failed to spill pages");
spillers.add(singleStreamSpiller2);

@@ -104,10 +104,10 @@ public void testUpdateAndDeletedMerge()
Page inputPage = makePageFromBlocks(
5,
Optional.of(rowIdNulls),
-new Block[]{
-new LongArrayBlock(5, Optional.of(rowIdNulls), new long[]{2, 0, 1, 2, 2}), // TransactionId
-new LongArrayBlock(5, Optional.of(rowIdNulls), new long[]{0, 0, 3, 1, 2}), // rowId
-new IntArrayBlock(5, Optional.of(rowIdNulls), new int[]{536870912, 0, 536870912, 536870912, 536870912})}, // bucket
+new Block[] {
+new LongArrayBlock(5, Optional.of(rowIdNulls), new long[] {2, 0, 1, 2, 2}), // TransactionId
+new LongArrayBlock(5, Optional.of(rowIdNulls), new long[] {0, 0, 3, 1, 2}), // rowId
+new IntArrayBlock(5, Optional.of(rowIdNulls), new int[] {536870912, 0, 536870912, 536870912, 536870912})}, // bucket
new Block[] {
// customer
makeVarcharArrayBlock("Aaron", "Carol", "Dave", "Dave", "Ed"),
@@ -145,9 +145,9 @@ public void testAnotherMergeCase()
5,
Optional.of(rowIdNulls),
new Block[] {
-new LongArrayBlock(5, Optional.of(rowIdNulls), new long[]{2, 0, 1, 2, 2}), // TransactionId
-new LongArrayBlock(5, Optional.of(rowIdNulls), new long[]{0, 0, 3, 1, 2}), // rowId
-new IntArrayBlock(5, Optional.of(rowIdNulls), new int[]{536870912, 0, 536870912, 536870912, 536870912})}, // bucket
+new LongArrayBlock(5, Optional.of(rowIdNulls), new long[] {2, 0, 1, 2, 2}), // TransactionId
+new LongArrayBlock(5, Optional.of(rowIdNulls), new long[] {0, 0, 3, 1, 2}), // rowId
+new IntArrayBlock(5, Optional.of(rowIdNulls), new int[] {536870912, 0, 536870912, 536870912, 536870912})}, // bucket
new Block[] {
// customer
makeVarcharArrayBlock("Aaron", "Carol", "Dave", "Dave", "Ed"),

@@ -29,7 +29,7 @@ public abstract class BasePushdownPlanTest
{
protected Optional<TableHandle> getTableHandle(Session session, QualifiedObjectName objectName)
{
-return getQueryRunner().inTransaction(session, transactionSession -> { return getQueryRunner().getMetadata().getTableHandle(transactionSession, objectName); });
+return getQueryRunner().inTransaction(session, transactionSession -> getQueryRunner().getMetadata().getTableHandle(transactionSession, objectName));
}

protected Map<String, ColumnHandle> getColumnHandles(Session session, QualifiedObjectName tableName)

@@ -32,7 +32,7 @@ public class VariableWidthBlockBuilder
implements BlockBuilder
{
private static final int INSTANCE_SIZE = instanceSize(VariableWidthBlockBuilder.class);
-private static final Block NULL_VALUE_BLOCK = new VariableWidthBlock(0, 1, EMPTY_SLICE, new int[]{0, 0}, new boolean[]{true});
+private static final Block NULL_VALUE_BLOCK = new VariableWidthBlock(0, 1, EMPTY_SLICE, new int[] {0, 0}, new boolean[] {true});
private static final int SIZE_IN_BYTES_PER_POSITION = Integer.BYTES + Byte.BYTES;

private final BlockBuilderStatus blockBuilderStatus;

@@ -64,7 +64,7 @@ public static Block wrapByteArrayAsBooleanBlockWithoutNulls(byte[] booleansAsByt
public static Block createBlockForSingleNonNullValue(boolean value)
{
byte byteValue = value ? (byte) 1 : 0;
-return new ByteArrayBlock(1, Optional.empty(), new byte[]{byteValue});
+return new ByteArrayBlock(1, Optional.empty(), new byte[] {byteValue});
}

private BooleanType()

@@ -102,7 +102,7 @@ public void setup()
else if (type.equals("ROW(BIGINT)")) {
Optional<boolean[]> rowIsNull = nullsAllowed ? Optional.of(generateIsNull(POSITIONS)) : Optional.empty();
LongArrayBlock randomLongArrayBlock = new LongArrayBlock(POSITIONS, rowIsNull, new Random(SEED).longs().limit(POSITIONS).toArray());
-block = RowBlock.fromNotNullSuppressedFieldBlocks(POSITIONS, rowIsNull, new Block[]{randomLongArrayBlock});
+block = RowBlock.fromNotNullSuppressedFieldBlocks(POSITIONS, rowIsNull, new Block[] {randomLongArrayBlock});
}
}


@@ -37,7 +37,7 @@ public class TestColumnarMap
{
private static final TypeOperators TYPE_OPERATORS = new TypeOperators();
private static final MapType MAP_TYPE = new MapType(VARCHAR, VARCHAR, TYPE_OPERATORS);
-private static final int[] MAP_SIZES = new int[]{16, 0, 13, 1, 2, 11, 4, 7};
+private static final int[] MAP_SIZES = new int[] {16, 0, 13, 1, 2, 11, 4, 7};

@Test
public void test()

@@ -66,7 +66,7 @@ public void testNestedGetLoadedBlock()
List<Block> actualNotifications = new ArrayList<>();
Block arrayBlock = new IntArrayBlock(2, Optional.empty(), new int[] {0, 1});
LazyBlock lazyArrayBlock = new LazyBlock(2, () -> arrayBlock);
-Block rowBlock = RowBlock.fromFieldBlocks(2, new Block[]{lazyArrayBlock});
+Block rowBlock = RowBlock.fromFieldBlocks(2, new Block[] {lazyArrayBlock});
LazyBlock lazyBlock = new LazyBlock(2, () -> rowBlock);
LazyBlock.listenForLoads(lazyBlock, actualNotifications::add);
