Build: Add checkstyle rule for instantiating HashMap, HashSet, ArrayList (apache#3689)
hililiwei authored Dec 18, 2021
1 parent e9d2351 commit 2531545
Showing 59 changed files with 149 additions and 168 deletions.
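
For context, the convention this commit enforces: direct constructors for the common JDK collections are replaced with the relocated Guava factory methods already used elsewhere in Iceberg. A minimal sketch of the before/after (hypothetical snippet, not taken from the diff; class and variable names are made up):

// Hypothetical illustration of the convention the new checkstyle rules enforce.
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;

class CollectionStyleExample {
  void example() {
    // Flagged by the new RegexpSingleline checks:
    //   List<String> names = new ArrayList<>();
    //   Map<String, Long> counts = new HashMap<>();
    //   Set<String> ids = new HashSet<>();

    // Preferred factory methods:
    List<String> names = Lists.newArrayList();
    Map<String, Long> counts = Maps.newHashMap();
    Set<String> ids = Sets.newHashSet();

    names.add("a");
    counts.put("a", 1L);
    ids.add("a");
  }
}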
15 changes: 15 additions & 0 deletions .baseline/checkstyle/checkstyle.xml
@@ -51,6 +51,21 @@
<property name="format" value="new JavaSparkContext\(.*\)"/>
<property name="message" value="Prefer using JavaSparkContext.fromSparkContext() instead of calling a constructor directly."/>
</module>
<module name="RegexpSingleline">
<property name="format" value="new HashMap&lt;&gt;\(.*\)"/>
<property name="message"
value="Prefer using Maps.newHashMap instead."/>
</module>
<module name="RegexpSingleline">
<property name="format" value="new ArrayList&lt;&gt;\(.*\)"/>
<property name="message"
value="Prefer using Lists.newArrayList() instead."/>
</module>
<module name="RegexpSingleline">
<property name="format" value="new HashSet&lt;&gt;\(.*\)"/>
<property name="message"
value="Prefer using Sets.newHashSet() instead."/>
</module>
<module name="SuppressionFilter"> <!-- baseline-gradle: README.md -->
<property name="file" value="${config_loc}/checkstyle-suppressions.xml"/>
</module>

@@ -30,7 +30,6 @@
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -77,6 +76,7 @@
import org.apache.iceberg.parquet.Parquet;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableSet;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.UUIDUtil;
@@ -604,7 +604,7 @@ private void writeTableWithIncrementalRecords() throws Exception {
}

private void writeTable(boolean constantRecords) throws Exception {
rowsWritten = new ArrayList<>();
rowsWritten = Lists.newArrayList();
tables = new HadoopTables();
tableLocation = temp.newFolder("test").toString();

@@ -728,7 +728,7 @@ private static org.apache.arrow.vector.types.pojo.Schema createExpectedArrowSche
}

private List<GenericRecord> createIncrementalRecordsForDate(Schema schema, LocalDateTime datetime) {
List<GenericRecord> records = new ArrayList<>();
List<GenericRecord> records = Lists.newArrayList();
for (int i = 0; i < NUM_ROWS_PER_MONTH; i++) {
GenericRecord rec = GenericRecord.create(schema);
rec.setField("timestamp", datetime.plus(i, ChronoUnit.DAYS));
@@ -764,7 +764,7 @@ private List<GenericRecord> createIncrementalRecordsForDate(Schema schema, Local
}

private List<GenericRecord> createConstantRecordsForDate(Schema schema, LocalDateTime datetime) {
List<GenericRecord> records = new ArrayList<>();
List<GenericRecord> records = Lists.newArrayList();
for (int i = 0; i < NUM_ROWS_PER_MONTH; i++) {
GenericRecord rec = GenericRecord.create(schema);
rec.setField("timestamp", datetime);
4 changes: 2 additions & 2 deletions core/src/main/java/org/apache/iceberg/BaseFileScanTask.java
@@ -19,7 +19,6 @@

package org.apache.iceberg;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
@@ -28,6 +27,7 @@
import org.apache.iceberg.relocated.com.google.common.annotations.VisibleForTesting;
import org.apache.iceberg.relocated.com.google.common.base.MoreObjects;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;

class BaseFileScanTask implements FileScanTask {
private final DataFile file;
@@ -117,7 +117,7 @@ static final class OffsetsAwareTargetSplitSizeScanTaskIterator implements Iterat
this.offsets = ImmutableList.copyOf(offsetList);
this.parentScanTask = parentScanTask;
this.targetSplitSize = targetSplitSize;
this.splitSizes = new ArrayList<>(offsets.size());
this.splitSizes = Lists.newArrayListWithCapacity(offsets.size());
if (offsets.size() > 0) {
int lastIndex = offsets.size() - 1;
for (int index = 0; index < lastIndex; index++) {

@@ -24,7 +24,6 @@
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.AccessDeniedException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -287,7 +286,7 @@ public List<Namespace> listNamespaces(Namespace namespace) {
try {
// using the iterator listing allows for paged downloads
// from HDFS and prefetching from object storage.
List<Namespace> namespaces = new ArrayList<>();
List<Namespace> namespaces = Lists.newArrayList();
RemoteIterator<FileStatus> it = fs.listStatusIterator(nsPath);
while (it.hasNext()) {
Path path = it.next().getPath();
3 changes: 1 addition & 2 deletions core/src/main/java/org/apache/iceberg/util/Tasks.java
@@ -19,7 +19,6 @@

package org.apache.iceberg.util;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@@ -471,7 +470,7 @@ private static Collection<Throwable> waitFor(Collection<Future<?>> futures) {
}

if (numFinished == futures.size()) {
List<Throwable> uncaught = new ArrayList<>();
List<Throwable> uncaught = Lists.newArrayList();
// all of the futures are done, get any uncaught exceptions
for (Future<?> future : futures) {
try {
3 changes: 1 addition & 2 deletions core/src/test/java/org/apache/iceberg/TestMetrics.java
@@ -25,7 +25,6 @@
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -433,7 +432,7 @@ public void testMetricsForTopLevelWithMultipleRowGroup() throws Exception {
Assume.assumeTrue("Skip test for formats that do not support small row groups", supportsSmallRowGroups());

int recordCount = 201;
List<Record> records = new ArrayList<>(recordCount);
List<Record> records = Lists.newArrayListWithExpectedSize(recordCount);

for (int i = 0; i < recordCount; i++) {
Record newRecord = GenericRecord.create(SIMPLE_SCHEMA);
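
Where the original code passed an initial capacity to the constructor, as in BaseFileScanTask and TestMetrics above, the diff keeps that sizing hint by using the capacity-aware Guava factories. A small sketch of the difference (illustrative only, not part of the diff):

// Illustrative sketch; the capacity-aware factories preserve the hint from new ArrayList<>(n).
import java.util.List;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;

class CapacityHintExample {
  List<Long> exact(int n) {
    // Replaces new ArrayList<>(n): the backing array starts at exactly n.
    return Lists.newArrayListWithCapacity(n);
  }

  List<Long> estimated(int n) {
    // Pads the capacity slightly when n is only an estimate of the final size.
    return Lists.newArrayListWithExpectedSize(n);
  }
}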
12 changes: 5 additions & 7 deletions core/src/test/java/org/apache/iceberg/TestRemoveSnapshots.java
@@ -20,8 +20,6 @@
package org.apache.iceberg;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
@@ -887,15 +885,15 @@ public void testWithExpiringDanglingStageCommit() {
.appendFile(FILE_C)
.commit();

Set<String> deletedFiles = new HashSet<>();
Set<String> deletedFiles = Sets.newHashSet();

// Expire all commits including dangling staged snapshot.
table.expireSnapshots()
.deleteWith(deletedFiles::add)
.expireOlderThan(snapshotB.timestampMillis() + 1)
.commit();

Set<String> expectedDeletes = new HashSet<>();
Set<String> expectedDeletes = Sets.newHashSet();
expectedDeletes.add(snapshotA.manifestListLocation());

// Files should be deleted of dangling staged snapshot
@@ -932,7 +930,7 @@ public void testWithCherryPickTableSnapshot() {
Snapshot snapshotA = table.currentSnapshot();

// `B` commit
Set<String> deletedAFiles = new HashSet<>();
Set<String> deletedAFiles = Sets.newHashSet();
table.newOverwrite()
.addFile(FILE_B)
.deleteFile(FILE_A)
@@ -964,7 +962,7 @@ public void testWithCherryPickTableSnapshot() {
table.manageSnapshots()
.setCurrentSnapshot(snapshotC.snapshotId())
.commit();
List<String> deletedFiles = new ArrayList<>();
List<String> deletedFiles = Lists.newArrayList();

// Expire `C`
table.expireSnapshots()
@@ -1017,7 +1015,7 @@ public void testWithExpiringStagedThenCherrypick() {
base = readMetadata();
Snapshot snapshotD = base.snapshots().get(3);

List<String> deletedFiles = new ArrayList<>();
List<String> deletedFiles = Lists.newArrayList();

// Expire `B` commit.
table.expireSnapshots()

@@ -19,11 +19,11 @@

package org.apache.iceberg;

import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.iceberg.ManifestEntry.Status;
import org.apache.iceberg.exceptions.CommitFailedException;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.junit.Test;

public class TestSequenceNumberForV2Table extends TableTestBase {
@@ -236,8 +236,8 @@ public void testMultipleOperationsTransaction() {
V2Assert.assertEquals("Snapshot sequence number should be 1", 1, snap1.sequenceNumber());
V2Assert.assertEquals("Last sequence number should be 0", 0, readMetadata().lastSequenceNumber());

Set<DataFile> toAddFiles = new HashSet<>();
Set<DataFile> toDeleteFiles = new HashSet<>();
Set<DataFile> toAddFiles = Sets.newHashSet();
Set<DataFile> toDeleteFiles = Sets.newHashSet();
toAddFiles.add(FILE_B);
toDeleteFiles.add(FILE_A);
txn.newRewrite().rewriteFiles(toDeleteFiles, toAddFiles).commit();

@@ -24,7 +24,6 @@
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import org.apache.iceberg.AppendFiles;
import org.apache.iceberg.AssertHelpers;
import org.apache.iceberg.DataFile;
@@ -34,6 +33,7 @@
import org.apache.iceberg.SortOrder;
import org.apache.iceberg.Table;
import org.apache.iceberg.exceptions.NoSuchTableException;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.transforms.Transform;
import org.apache.iceberg.transforms.Transforms;
@@ -168,7 +168,7 @@ private static void createDummyTable(File tableDir, File dataDir) throws IOExcep
Table table = TABLES.create(SCHEMA, tableDir.toURI().toString());
AppendFiles append = table.newAppend();
String data = dataDir.getPath() + "/data.parquet";
Files.write(Paths.get(data), new ArrayList<>(), StandardCharsets.UTF_8);
Files.write(Paths.get(data), Lists.newArrayList(), StandardCharsets.UTF_8);
DataFile dataFile = DataFiles.builder(PartitionSpec.unpartitioned())
.withPath(data)
.withFileSizeInBytes(10)
11 changes: 5 additions & 6 deletions core/src/test/java/org/apache/iceberg/jdbc/TestJdbcCatalog.java
@@ -24,7 +24,6 @@
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -284,7 +283,7 @@ public void testConcurrentCommit() throws IOException {
Table table = catalog.createTable(tableIdentifier, SCHEMA, PartitionSpec.unpartitioned());
// append file and commit!
String data = temp.newFile("data.parquet").getPath();
Files.write(Paths.get(data), new ArrayList<>(), StandardCharsets.UTF_8);
Files.write(Paths.get(data), Lists.newArrayList(), StandardCharsets.UTF_8);
DataFile dataFile = DataFiles.builder(PartitionSpec.unpartitioned())
.withPath(data)
.withFileSizeInBytes(10)
@@ -294,7 +293,7 @@ public void testConcurrentCommit() throws IOException {
Assert.assertEquals(1, table.history().size());
catalog.dropTable(tableIdentifier);
data = temp.newFile("data2.parquet").getPath();
Files.write(Paths.get(data), new ArrayList<>(), StandardCharsets.UTF_8);
Files.write(Paths.get(data), Lists.newArrayList(), StandardCharsets.UTF_8);
DataFile dataFile2 = DataFiles.builder(PartitionSpec.unpartitioned())
.withPath(data)
.withFileSizeInBytes(10)
@@ -313,7 +312,7 @@ public void testCommitHistory() throws IOException {
Table table = catalog.loadTable(testTable);

String data = temp.newFile("data.parquet").getPath();
Files.write(Paths.get(data), new ArrayList<>(), StandardCharsets.UTF_8);
Files.write(Paths.get(data), Lists.newArrayList(), StandardCharsets.UTF_8);
DataFile dataFile = DataFiles.builder(PartitionSpec.unpartitioned())
.withPath(data)
.withFileSizeInBytes(10)
@@ -323,7 +322,7 @@ public void testCommitHistory() throws IOException {
Assert.assertEquals(1, table.history().size());

data = temp.newFile("data2.parquet").getPath();
Files.write(Paths.get(data), new ArrayList<>(), StandardCharsets.UTF_8);
Files.write(Paths.get(data), Lists.newArrayList(), StandardCharsets.UTF_8);
dataFile = DataFiles.builder(PartitionSpec.unpartitioned())
.withPath(data)
.withFileSizeInBytes(10)
@@ -333,7 +332,7 @@ public void testCommitHistory() throws IOException {
Assert.assertEquals(2, table.history().size());

data = temp.newFile("data3.parquet").getPath();
Files.write(Paths.get(data), new ArrayList<>(), StandardCharsets.UTF_8);
Files.write(Paths.get(data), Lists.newArrayList(), StandardCharsets.UTF_8);
dataFile = DataFiles.builder(PartitionSpec.unpartitioned())
.withPath(data)
.withFileSizeInBytes(10)

@@ -30,7 +30,6 @@
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.UUID;
@@ -137,7 +136,7 @@ public class TestMetricsRowGroupFilterTypes {

@Before
public void createInputFile() throws IOException {
List<Record> records = new ArrayList<>();
List<Record> records = Lists.newArrayList();
// create 50 records
for (int i = 0; i < 50; i += 1) {
Record record = GenericRecord.create(FILE_SCHEMA);

@@ -19,7 +19,6 @@

package org.apache.iceberg.hive;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
@@ -30,6 +29,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.iceberg.Schema;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
import org.slf4j.Logger;
@@ -61,7 +61,7 @@ static Type convert(TypeInfo typeInfo, boolean autoConvert) {
}

List<Types.NestedField> convertInternal(List<String> names, List<TypeInfo> typeInfos, List<String> comments) {
List<Types.NestedField> result = new ArrayList<>(names.size());
List<Types.NestedField> result = Lists.newArrayListWithExpectedSize(names.size());
for (int i = 0; i < names.size(); ++i) {
result.add(Types.NestedField.optional(id++, names.get(i), convertType(typeInfos.get(i)),
(comments.isEmpty() || i >= comments.size()) ? null : comments.get(i)));

@@ -19,14 +19,14 @@

package org.apache.iceberg.hive;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.Schema;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;

@@ -66,9 +66,9 @@ public static Schema convert(List<FieldSchema> fieldSchemas) {
* @return An equivalent Iceberg Schema
*/
public static Schema convert(List<FieldSchema> fieldSchemas, boolean autoConvert) {
List<String> names = new ArrayList<>(fieldSchemas.size());
List<TypeInfo> typeInfos = new ArrayList<>(fieldSchemas.size());
List<String> comments = new ArrayList<>(fieldSchemas.size());
List<String> names = Lists.newArrayListWithExpectedSize(fieldSchemas.size());
List<TypeInfo> typeInfos = Lists.newArrayListWithExpectedSize(fieldSchemas.size());
List<String> comments = Lists.newArrayListWithExpectedSize(fieldSchemas.size());

for (FieldSchema col : fieldSchemas) {
names.add(col.getName());