Commit

chore: try cleaning up some settings

cpcloud committed Sep 23, 2024
1 parent c0f94d4 commit b02fb34
Showing 2 changed files with 18 additions and 13 deletions.
13 changes: 0 additions & 13 deletions docker/spark-connect/conf.properties
@@ -1,25 +1,12 @@
-spark.cores.max=1
-spark.default.parallelism=1
 spark.driver.extraJavaOptions=-Duser.timezone=GMT
-spark.dynamicAllocation.enabled=false
 spark.executor.extraJavaOptions=-Duser.timezone=GMT
-spark.executor.heartbeatInterval=3600s
-spark.executor.instances=1
 spark.jars.packages=org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.5.2
-spark.network.timeout=4200s
-spark.rdd.compress=false
-spark.serializer=org.apache.spark.serializer.KryoSerializer
-spark.shuffle.compress=false
-spark.shuffle.spill.compress=false
 spark.sql.catalog.local.type=hadoop
 spark.sql.catalog.local.warehouse=warehouse
 spark.sql.catalog.local=org.apache.iceberg.spark.SparkCatalog
-spark.sql.execution.arrow.pyspark.enabled=false
 spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions
 spark.sql.legacy.timeParserPolicy=LEGACY
 spark.sql.session.timeZone=UTC
-spark.sql.shuffle.partitions=1
 spark.sql.streaming.schemaInference=true
-spark.storage.blockManagerSlaveTimeoutMs=4200s
 spark.ui.enabled=false
 spark.ui.showConsoleProgress=false
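
The thirteen properties removed above are not dropped; they reappear as programmatic SparkSession.builder.config() calls in the PySpark test fixture below. As a quick sanity check that such programmatic settings take effect the same way as conf.properties entries, here is a minimal PySpark sketch (not part of this commit; the app name and the chosen options are only for illustration):

from pyspark.sql import SparkSession

# Sketch only: set two of the relocated options programmatically and read
# them back, to confirm .config() behaves like a conf.properties entry.
spark = (
    SparkSession.builder.appName("config-check")
    .config("spark.sql.shuffle.partitions", "1")
    .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    .getOrCreate()
)

# SQL runtime settings are readable through spark.conf.
assert spark.conf.get("spark.sql.shuffle.partitions") == "1"

# Everything set on the underlying SparkConf is listed as (key, value) pairs.
for key, value in spark.sparkContext.getConf().getAll():
    if key in ("spark.serializer", "spark.sql.shuffle.partitions"):
        print(key, "=", value)

spark.stop()
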
18 changes: 18 additions & 0 deletions ibis/backends/pyspark/tests/conftest.py
@@ -287,6 +287,24 @@ def connect(*, tmpdir, worker_id, **kw):
         for line in config_file:
             config = config.config(*map(str.strip, line.strip().split("=", 1)))

+        config = (
+            config.config("spark.cores.max", "1")
+            .config("spark.default.parallelism", "1")
+            .config("spark.dynamicAllocation.enabled", "false")
+            .config("spark.executor.heartbeatInterval", "3600s")
+            .config("spark.executor.instances", "1")
+            .config("spark.network.timeout", "4200s")
+            .config("spark.rdd.compress", "false")
+            .config(
+                "spark.serializer", "org.apache.spark.serializer.KryoSerializer"
+            )
+            .config("spark.shuffle.compress", "false")
+            .config("spark.shuffle.spill.compress", "false")
+            .config("spark.sql.execution.arrow.pyspark.enabled", "false")
+            .config("spark.sql.shuffle.partitions", "1")
+            .config("spark.storage.blockManagerSlaveTimeoutMs", "4200s")
+        )
+
         try:
             from delta.pip_utils import configure_spark_with_delta_pip
         except ImportError:

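Taken together, the commit keeps environment-level settings (time zones, the Iceberg catalog, UI flags) in conf.properties and moves the test-harness tuning into the fixture. A standalone sketch of that combined pattern, using the delta.pip_utils helper imported in the context lines above, might look like the following; the properties path, app name, chosen overrides, and ImportError fallback are assumptions for illustration, not the repository's code:

from pathlib import Path

from pyspark.sql import SparkSession

builder = SparkSession.builder.appName("ibis-pyspark-tests")

# Apply whatever remains in the properties file (one key=value per line).
props = Path("docker/spark-connect/conf.properties")
for line in props.read_text().splitlines():
    if not line.strip():
        continue
    key, value = map(str.strip, line.split("=", 1))
    builder = builder.config(key, value)

# Test-harness settings now live in code rather than in the shared file.
builder = (
    builder.config("spark.sql.shuffle.partitions", "1")
    .config("spark.executor.instances", "1")
    .config("spark.dynamicAllocation.enabled", "false")
)

try:
    # delta-spark's helper adds the Delta Lake package to the builder.
    from delta.pip_utils import configure_spark_with_delta_pip

    spark = configure_spark_with_delta_pip(builder).getOrCreate()
except ImportError:
    spark = builder.getOrCreate()
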
0 comments on commit b02fb34
