[Improve](common) Optimize logging performance with LOG.isDebugEnabled()
CalvinKirs committed Feb 18, 2024
1 parent d3a22fb · commit 9daf81d
Showing 384 changed files with 1,335 additions and 2,902 deletions.
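
Most of the hunks below toggle between two debug-logging idioms: a LOG.debug call wrapped in an explicit if (LOG.isDebugEnabled()) guard, and a bare LOG.debug call written either with string concatenation or with {} placeholders (a few remaining hunks only touch trailing newlines). As a minimal sketch of the trade-off only, not code taken from this commit: it assumes the plain Log4j 2 API, and DebugLogPatterns is a hypothetical class name used for illustration.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class DebugLogPatterns {
    private static final Logger LOG = LogManager.getLogger(DebugLogPatterns.class);

    static void guarded(Object payload) {
        // Explicit guard: the concatenated message is only built when DEBUG is enabled,
        // so a disabled logger pays just the cost of the level check.
        if (LOG.isDebugEnabled()) {
            LOG.debug("processing payload: " + payload);
        }
    }

    static void parameterized(Object payload) {
        // Placeholder form: Log4j 2 performs its own level check before formatting,
        // so the explicit guard adds nothing unless computing the arguments is itself costly.
        LOG.debug("processing payload: {}", payload);
    }
}

The guard only matters when the message is built eagerly: with concatenation (or an expensive argument expression) the work happens before the call, while the placeholder form defers formatting until after the level check.
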
@@ -63,9 +63,7 @@ public abstract class AvroReader {
protected void openSchemaReader() throws IOException {
InputStream inputStream = new BufferedInputStream(fileSystem.open(path));
schemaReader = new DataFileStream<>(inputStream, new GenericDatumReader<>());
if (LOG.isDebugEnabled()) {
LOG.debug("success open avro schema reader.");
}
LOG.debug("success open avro schema reader.");
}

protected void openDataReader(AvroFileContext avroFileContext) throws IOException {
@@ -74,9 +72,7 @@ protected void openDataReader(AvroFileContext avroFileContext) throws IOExceptio
FileSplit fileSplit =
new FileSplit(path, avroFileContext.getSplitStartOffset(), avroFileContext.getSplitSize(), job);
dataReader = new AvroRecordReader<>(job, fileSplit);
if (LOG.isDebugEnabled()) {
LOG.debug("success open avro data reader.");
}
LOG.debug("success open avro data reader.");
}

protected void projectionSchema(JobConf job, AvroFileContext avroFileContext) {
@@ -103,9 +99,7 @@ protected void projectionSchema(JobConf job, AvroFileContext avroFileContext) {
projectionSchema = avroSchema;
}
AvroJob.setInputSchema(job, projectionSchema);
if (LOG.isDebugEnabled()) {
LOG.debug("projection avro schema is:" + projectionSchema.toString());
}
LOG.debug("projection avro schema is:" + projectionSchema.toString());
}

}
@@ -202,9 +202,7 @@ private static void writeAttribute(JsonGenerator jg, ObjectName oname,
} catch (RuntimeErrorException e) {
// RuntimeErrorException happens when an unexpected failure occurs in getAttribute
// for example https://issues.apache.org/jira/browse/DAEMON-120
if (LOG.isDebugEnabled()) {
LOG.debug("getting attribute " + attName + " of " + oname + " threw an exception", e);
}
LOG.debug("getting attribute " + attName + " of " + oname + " threw an exception", e);
return;
} catch (AttributeNotFoundException e) {
//Ignored the attribute was not found, which should never happen because the bean
@@ -138,9 +138,7 @@ public void close() {
classLoader.close();
} catch (IOException e) {
// Log and ignore.
if (LOG.isDebugEnabled()) {
LOG.debug("Error closing the URLClassloader.", e);
}
LOG.debug("Error closing the URLClassloader.", e);
}
}
// We are now un-usable (because the class loader has been
@@ -318,9 +318,7 @@ protected void init(TJavaUdfExecutorCtorParams request, String jarPath, Type fun
Pair<Boolean, JavaUdfDataType> returnType = UdfUtils.setReturnType(funcRetType,
methods[idx].getReturnType());
if (!returnType.first) {
if (LOG.isDebugEnabled()) {
LOG.debug("result function set return parameterTypes has error");
}
LOG.debug("result function set return parameterTypes has error");
} else {
retType = returnType.second;
retClass = methods[idx].getReturnType();
@@ -332,18 +330,14 @@ protected void init(TJavaUdfExecutorCtorParams request, String jarPath, Type fun
addIndex = methodAccess.getIndex(UDAF_ADD_FUNCTION);
argClass = methods[idx].getParameterTypes();
if (argClass.length != parameterTypes.length + 1) {
if (LOG.isDebugEnabled()) {
LOG.debug("add function parameterTypes length not equal " + argClass.length + " "
+ parameterTypes.length + " " + methods[idx].getName());
}
LOG.debug("add function parameterTypes length not equal " + argClass.length + " "
+ parameterTypes.length + " " + methods[idx].getName());
}
if (!(parameterTypes.length == 0)) {
Pair<Boolean, JavaUdfDataType[]> inputType = UdfUtils.setArgTypes(parameterTypes,
argClass, true);
if (!inputType.first) {
if (LOG.isDebugEnabled()) {
LOG.debug("add function set arg parameterTypes has error");
}
LOG.debug("add function set arg parameterTypes has error");
} else {
argTypes = inputType.second;
}
@@ -142,9 +142,7 @@ protected void init(TJavaUdfExecutorCtorParams request, String jarPath, Type fun
String className = request.fn.scalar_fn.symbol;
ArrayList<String> signatures = Lists.newArrayList();
try {
if (LOG.isDebugEnabled()) {
LOG.debug("Loading UDF '" + className + "' from " + jarPath);
}
LOG.debug("Loading UDF '" + className + "' from " + jarPath);
ClassLoader loader;
if (jarPath != null) {
// Save for cleanup.
@@ -188,9 +186,7 @@ protected void init(TJavaUdfExecutorCtorParams request, String jarPath, Type fun
retType = returnType.second;
}
argTypes = new JavaUdfDataType[0];
if (LOG.isDebugEnabled()) {
LOG.debug("Loaded UDF '" + className + "' from " + jarPath);
}
LOG.debug("Loaded UDF '" + className + "' from " + jarPath);
return;
}
returnType = UdfUtils.setReturnType(funcRetType, m.getReturnType());
@@ -207,9 +203,7 @@ protected void init(TJavaUdfExecutorCtorParams request, String jarPath, Type fun
} else {
argTypes = inputType.second;
}
if (LOG.isDebugEnabled()) {
LOG.debug("Loaded UDF '" + className + "' from " + jarPath);
}
LOG.debug("Loaded UDF '" + className + "' from " + jarPath);
retType.setKeyType(keyType);
retType.setValueType(valueType);
return;
@@ -64,9 +64,7 @@ public class PaimonJniScanner extends JniScanner {

public PaimonJniScanner(int batchSize, Map<String, String> params) {
this.classLoader = this.getClass().getClassLoader();
if (LOG.isDebugEnabled()) {
LOG.debug("params:{}", params);
}
LOG.debug("params:{}", params);
this.params = params;
String[] requiredFields = params.get("required_fields").split(",");
String[] requiredTypes = params.get("columns_types").split("#");
@@ -119,17 +117,13 @@ private int[] getProjected() {

private List<Predicate> getPredicates() {
List<Predicate> predicates = PaimonScannerUtils.decodeStringToObject(paimonPredicate);
if (LOG.isDebugEnabled()) {
LOG.debug("predicates:{}", predicates);
}
LOG.debug("predicates:{}", predicates);
return predicates;
}

private Split getSplit() {
Split split = PaimonScannerUtils.decodeStringToObject(paimonSplit);
if (LOG.isDebugEnabled()) {
LOG.debug("split:{}", split);
}
LOG.debug("split:{}", split);
return split;
}

@@ -207,9 +201,7 @@ private void initTable() {
}
this.table = tableExt.getTable();
paimonAllFieldNames = PaimonScannerUtils.fieldNames(this.table.rowType());
if (LOG.isDebugEnabled()) {
LOG.debug("paimonAllFieldNames:{}", paimonAllFieldNames);
}
LOG.debug("paimonAllFieldNames:{}", paimonAllFieldNames);
}

}
@@ -1006,3 +1006,4 @@ public int getOlapColumnIndexSize() {
}
}
}

@@ -2322,3 +2322,4 @@ public static Type getTypeFromTypeName(String typeName) {
return typeMap.getOrDefault(typeName, Type.UNSUPPORTED);
}
}

@@ -38,3 +38,4 @@ public String getPrefix() {
return prefix;
}
}

@@ -51,9 +51,7 @@ public LimitInputStream(InputStream in, int limitspeed) throws IOException {
throw new IOException("InputStream is null");
}
speed = limitspeed;
if (LOG.isDebugEnabled()) {
LOG.debug("LimitinputStream limit speed: {}", speed);
}
LOG.debug("LimitinputStream limit speed: {}", speed);
this.in = in;
bytesReadTotal = 0;
bstart = false;
@@ -52,9 +52,7 @@ public LimitOutputStream(OutputStream out, int limitspeed)
throw new IOException("OutputStream is null");
}
speed = limitspeed;
if (LOG.isDebugEnabled()) {
LOG.debug("LimitOutputStream limit speed: {}", speed);
}
LOG.debug("LimitOutputStream limit speed: {}", speed);
this.out = out;
bytesWriteTotal = 0;
bstart = false;
@@ -158,3 +158,4 @@ public static void makeAccessible(AccessibleObject classMember) {
}
}
}

@@ -285,3 +285,4 @@ private static boolean isSameType(Class<?> firstType, Class<?> secondType) {
}

}

@@ -165,3 +165,4 @@ private static boolean isSameType(Class<?> firstType, Class<?> secondType) {
|| secondType.isPrimitive() && secondType == AutoType.getPrimitiveType(firstType);
}
}

@@ -2190,4 +2190,4 @@ public <R> R call(IMetaStoreClient client, ThrowingFunction<IMetaStoreClient, R,
return FunctionalUtils.call(client, Optional.empty(), allowFailure, consumer, this.readWriteClientType,
actionName, parameters);
}
}
}
@@ -1543,3 +1543,4 @@ public UpdateJobFromSourceControlResult updateJobFromSourceControl(UpdateJobFrom
return null;
}
}

@@ -130,4 +130,4 @@ List<ColumnStatisticsError> updateTableColumnStatistics(
String tableName,
List<ColumnStatistics> columnStatistics
);
}
}
@@ -195,4 +195,4 @@ public List<ColumnStatisticsError> updateTableColumnStatistics(String dbName, St
return awsGlueMetastore.updateTableColumnStatistics(dbName, tableName, columnStatistics);
}

}
}
@@ -182,4 +182,4 @@ public int hashCode() {
return Objects.hash(dbName, tableName);
}
}
}
}
@@ -44,4 +44,4 @@ private boolean isCacheEnabled(Configuration conf) {
boolean tableCacheEnabled = conf.getBoolean(AWS_GLUE_TABLE_CACHE_ENABLE, false);
return (databaseCacheEnabled || tableCacheEnabled);
}
}
}
@@ -659,4 +659,4 @@ public UpdateColumnStatisticsForTableResult call() throws Exception {
}
return columnStatisticsErrors;
}
}
}
@@ -61,4 +61,4 @@ private AWSGlueConfig() {
public static final String AWS_GLUE_ACCESS_KEY = "aws.glue.access-key";
public static final String AWS_GLUE_SECRET_KEY = "aws.glue.secret-key";
public static final String AWS_GLUE_SESSION_TOKEN = "aws.glue.session-token";
}
}
@@ -150,4 +150,4 @@ public List<Partition> getPartitionsFailed() {
return partitionsFailed;
}

}
}
@@ -446,10 +446,8 @@ private RollupJobV2 createMaterializedViewJob(String rawSql, String mvName, Stri

mvJob.addMVIndex(partitionId, mvIndex);

if (LOG.isDebugEnabled()) {
LOG.debug("create materialized view index {} based on index {} in partition {}",
mvIndexId, baseIndexId, partitionId);
}
LOG.debug("create materialized view index {} based on index {} in partition {}",
mvIndexId, baseIndexId, partitionId);
} // end for partitions

LOG.info("finished to create materialized view job: {}", mvJob.getJobId());
@@ -610,9 +608,7 @@ private List<Column> checkAndPrepareMaterializedView(CreateMaterializedViewStmt
column.setUniqueId(Column.COLUMN_UNIQUE_ID_INIT_VALUE);
});
}
if (LOG.isDebugEnabled()) {
LOG.debug("lightSchemaChange:{}, newMVColumns:{}", olapTable.getEnableLightSchemaChange(), newMVColumns);
}
LOG.debug("lightSchemaChange:{}, newMVColumns:{}", olapTable.getEnableLightSchemaChange(), newMVColumns);
return newMVColumns;
}

@@ -856,11 +852,8 @@ public List<Column> checkAndPrepareMaterializedView(AddRollupClause addRollupCla
column.setUniqueId(Column.COLUMN_UNIQUE_ID_INIT_VALUE);
});
}
if (LOG.isDebugEnabled()) {
LOG.debug("lightSchemaChange:{}, rollupSchema:{}, baseSchema:{}",
olapTable.getEnableLightSchemaChange(), rollupSchema,
olapTable.getSchemaByIndexId(baseIndexId, true));
}
LOG.debug("lightSchemaChange:{}, rollupSchema:{}, baseSchema:{}",
olapTable.getEnableLightSchemaChange(), rollupSchema, olapTable.getSchemaByIndexId(baseIndexId, true));
return rollupSchema;
}

@@ -1133,11 +1126,9 @@ private void runAlterJobWithConcurrencyLimit(RollupJobV2 rollupJobV2) {
tableRunningJobSet.add(jobId);
shouldJobRun = true;
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("number of running alter job {} in table {} exceed limit {}. job {} is suspended",
tableRunningJobSet.size(), rollupJobV2.getTableId(),
Config.max_running_rollup_job_num_per_table, rollupJobV2.getJobId());
}
LOG.debug("number of running alter job {} in table {} exceed limit {}. job {} is suspended",
tableRunningJobSet.size(), rollupJobV2.getTableId(),
Config.max_running_rollup_job_num_per_table, rollupJobV2.getJobId());
shouldJobRun = false;
}
}
@@ -608,10 +608,8 @@ private void onFinished(OlapTable tbl) {

tbl.getIndexMetaByIndexId(rollupIndexId).setMaxColUniqueId(maxColUniqueId);

if (LOG.isDebugEnabled()) {
LOG.debug("rollupIndexId:{}, maxColUniqueId:{}, indexIdToSchema:{}", rollupIndexId, maxColUniqueId,
tbl.getIndexIdToSchema(true));
}
LOG.debug("rollupIndexId:{}, maxColUniqueId:{}, indexIdToSchema:{}", rollupIndexId, maxColUniqueId,
tbl.getIndexIdToSchema(true));
tbl.rebuildFullSchema();
}
