
Commit

Ensure traceId is set for write (#1122)
agrawal-siddharth authored Nov 22, 2023
1 parent 39af190 commit 35bef4d
Showing 3 changed files with 8 additions and 4 deletions.
1 change: 1 addition & 0 deletions CHANGES.md
@@ -4,6 +4,7 @@

 * PR #1117: Make read session caching duration configurable
 * PR #1118: Improve read session caching key
+* PR #1122: Set traceId on write
 * PR #1127: Fix job labeling for mixed case Dataproc job names

 ## 0.34.0 - 2023-10-31
@@ -526,7 +526,8 @@ public static SparkBigQueryConfig from(
     }

     com.google.common.base.Optional<String> traceApplicationNameParam =
-        getAnyOption(globalOptions, options, "traceApplicationName");
+        getAnyOption(globalOptions, options, "traceApplicationName")
+            .or(com.google.common.base.Optional.fromNullable("traceApplicationName"));
     config.traceId =
         traceApplicationNameParam.transform(
             traceApplicationName -> {
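
The hunk above pairs Guava's Optional.or(...) with the existing transform(...) call so that SparkBigQueryConfig produces a traceId even when the user never sets traceApplicationName. The following is a minimal standalone sketch of that pattern, not code from the repository: userOption and buildTraceId are hypothetical helpers, and the "Spark:<name>:<jobId>" concatenation is inferred from the test expectations below rather than copied from the (truncated) lambda body.

import com.google.common.base.Optional;

public class TraceIdFallbackSketch {

  // Hypothetical stand-in for getAnyOption(globalOptions, options, "traceApplicationName"):
  // returns absent when the option was not supplied.
  static Optional<String> userOption(String value) {
    return Optional.fromNullable(value);
  }

  // Mirrors the pattern in the diff: fall back to the literal "traceApplicationName"
  // so the transform always runs and every write ends up with a traceId.
  static Optional<String> buildTraceId(Optional<String> traceApplicationName, String traceJobId) {
    return traceApplicationName
        .or(Optional.fromNullable("traceApplicationName"))
        .transform(name -> "Spark:" + name + ":" + traceJobId);
  }

  public static void main(String[] args) {
    // Option not set: previously this would have been absent; now it defaults.
    System.out.println(buildTraceId(userOption(null), "traceJobId"));
    // Option set explicitly, as in testConfigFromOptions.
    System.out.println(buildTraceId(userOption("traceApplicationNameTest"), "traceJobId"));
  }
}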
@@ -124,7 +124,8 @@ public void testDefaults() {
     assertThat(config.getWriteMethod()).isEqualTo(SparkBigQueryConfig.WriteMethod.INDIRECT);
     assertThat(config.getCacheExpirationTimeInMinutes())
         .isEqualTo(SparkBigQueryConfig.DEFAULT_CACHE_EXPIRATION_IN_MINUTES);
-    assertThat(config.getTraceId().isPresent()).isFalse();
+    assertThat(config.getTraceId().isPresent()).isTrue();
+    assertThat(config.getTraceId().get().startsWith("Spark:traceApplicationName:"));
     assertThat(config.getBigQueryJobLabels()).isEmpty();
     assertThat(config.getEnableModeCheckForSchemaFields()).isTrue();
     assertThat(config.getDatetimeZoneId()).isEqualTo(ZoneId.of("UTC"));
@@ -171,7 +172,7 @@ public void testConfigFromOptions() {
             .put("writeMethod", "direct")
             .put("cacheExpirationTimeInMinutes", "100")
             .put("traceJobId", "traceJobId")
-            .put("traceApplicationName", "traceApplicationName")
+            .put("traceApplicationName", "traceApplicationNameTest")
             .put("bigQueryJobLabel.foo", "bar")
             .put("enableModeCheckForSchemaFields", "false")
             .put("datetimeZoneId", "Asia/Jerusalem")
@@ -223,7 +224,8 @@ public void testConfigFromOptions() {
     assertThat(config.getArrowCompressionCodec()).isEqualTo(CompressionCodec.ZSTD);
     assertThat(config.getWriteMethod()).isEqualTo(SparkBigQueryConfig.WriteMethod.DIRECT);
     assertThat(config.getCacheExpirationTimeInMinutes()).isEqualTo(100);
-    assertThat(config.getTraceId()).isEqualTo(Optional.of("Spark:traceApplicationName:traceJobId"));
+    assertThat(config.getTraceId())
+        .isEqualTo(Optional.of("Spark:traceApplicationNameTest:traceJobId"));
     assertThat(config.getBigQueryJobLabels()).hasSize(1);
     assertThat(config.getBigQueryJobLabels()).containsEntry("foo", "bar");
     assertThat(config.getEnableModeCheckForSchemaFields()).isFalse();
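
For readers less familiar with how these options surface at the Spark level, here is a rough usage sketch. It is an assumption-laden illustration rather than code from this repository: the "bigquery" format name, the "dataset.table" save target, and the SparkSession setup reflect typical connector usage, while the option keys (writeMethod, traceJobId, traceApplicationName) come from the test fixture above.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public class WriteWithTraceIdSketch {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder().appName("trace-id-demo").getOrCreate();
    Dataset<Row> df = spark.range(10).toDF("id");

    df.write()
        .format("bigquery")                        // assumed connector format name
        .option("writeMethod", "direct")
        .option("traceJobId", "traceJobId")
        // Optional after this change: if omitted, the connector falls back to a
        // default application name, so a traceId is still attached to the write.
        .option("traceApplicationName", "traceApplicationNameTest")
        .mode(SaveMode.Append)
        .save("dataset.table");                    // assumed target table
  }
}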
