Skip to content

Commit

Permalink
chore(pyspark): factor out some repeatedly used properties
Browse files Browse the repository at this point in the history
  • Loading branch information
cpcloud committed Sep 16, 2024
1 parent 784a02a commit 7c69b36
Showing 1 changed file with 11 additions and 5 deletions.
16 changes: 11 additions & 5 deletions ibis/backends/pyspark/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
import sqlglot.expressions as sge
from packaging.version import parse as vparse
from pyspark import SparkConf
from pyspark.errors import AnalysisException
from pyspark.sql import SparkSession
from pyspark.sql.types import BooleanType, DoubleType, LongType, StringType

Expand All @@ -32,8 +31,10 @@
from ibis.util import deprecated

try:
from pyspark.errors import AnalysisException
from pyspark.errors import ParseException as PySparkParseException
except ImportError:
from pyspark.sql.utils import AnalysisException
from pyspark.sql.utils import ParseException as PySparkParseException

if TYPE_CHECKING:
Expand Down Expand Up @@ -413,11 +414,16 @@ def _register_udfs(self, expr: ir.Expr) -> None:
self._session.udf.register("unwrap_json_float", unwrap_json_float)

def _register_in_memory_table(self, op: ops.InMemoryTable) -> None:
    """Materialize an in-memory ibis table as a Spark temporary view.

    Converts the op's data to a pandas frame, builds a Spark DataFrame
    with the schema translated from ibis, and registers it as a temp
    view under ``op.name``, replacing any existing view of that name.

    NOTE(review): the scraped diff interleaved the pre- and post-change
    lines (duplicate DataFrame creation and duplicate view drop/create);
    this is the coherent post-commit version.
    """
    session = self._session
    df = session.createDataFrame(
        data=op.data.to_frame(), schema=PySparkSchema.from_ibis(op.schema)
    )
    name = op.name

    # Dropping a view that does not exist raises AnalysisException;
    # suppress it so registration is idempotent.
    with contextlib.suppress(AnalysisException):
        session.catalog.dropTempView(name)

    df.createTempView(name)

def _finalize_memtable(self, name: str) -> None:
self._session.catalog.dropTempView(name)
Expand Down

0 comments on commit 7c69b36

Please sign in to comment.