Skip to content

Commit

Permalink
Add HDFS runtime configuration for the merge-tree write test suites
Browse files Browse the repository at this point in the history
  • Loading branch information
loneylee committed Mar 21, 2024
1 parent e2c2304 commit 856a621
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -48,15 +48,15 @@ class GlutenClickHouseMergeTreeWriteOnHDFSSuite
val fs = FileSystem.get(conf)
fs.delete(new org.apache.hadoop.fs.Path("/test"), true)
FileUtils.deleteDirectory(new File(HDFS_METADATA_PATH))
FileUtils.deleteDirectory(new File(HDFS_CACHE_PATH))
// FileUtils.deleteDirectory(new File(HDFS_CACHE_PATH))
FileUtils.forceMkdir(new File(HDFS_METADATA_PATH))
FileUtils.forceMkdir(new File(HDFS_CACHE_PATH))
// FileUtils.forceMkdir(new File(HDFS_CACHE_PATH))
}

override protected def afterEach(): Unit = {
  super.afterEach()
  // Remove per-test metadata so each test starts from a clean slate.
  val metadataDir = new File(HDFS_METADATA_PATH)
  FileUtils.deleteDirectory(metadataDir)
  // NOTE: the HDFS cache directory is intentionally NOT deleted between tests.
  // FileUtils.deleteDirectory(new File(HDFS_CACHE_PATH))
}

test("test mergetree table write") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,12 @@ class GlutenClickHouseMergeTreeWriteOnObjectStorageAbstractSuite
"spark.gluten.sql.columnar.backend.ch.runtime_config.storage_configuration.policies.__hdfs_main.volumes.main.disk",
"hdfs_cache")
.set("spark.gluten.sql.columnar.backend.ch.shuffle.hash.algorithm", "sparkMurmurHash3_32")
.set(
"spark.gluten.sql.columnar.backend.ch.runtime_config.hdfs.dfs_client_read_shortcircuit",
"false")
.set(
"spark.gluten.sql.columnar.backend.ch.runtime_config.hdfs.dfs_default_replica",
"1")
}
override protected def createTPCHNotNullTables(): Unit = {
createNotNullTPCHTablesInParquet(tablesPath)
Expand Down

0 comments on commit 856a621

Please sign in to comment.