diff --git a/ibis/backends/pyspark/__init__.py b/ibis/backends/pyspark/__init__.py
index e5bb8612f90ef..204a7064e881b 100644
--- a/ibis/backends/pyspark/__init__.py
+++ b/ibis/backends/pyspark/__init__.py
@@ -11,7 +11,6 @@
 import sqlglot.expressions as sge
 from packaging.version import parse as vparse
 from pyspark import SparkConf
-from pyspark.errors import AnalysisException
 from pyspark.sql import SparkSession
 from pyspark.sql.types import BooleanType, DoubleType, LongType, StringType
 
@@ -32,8 +31,10 @@
 from ibis.util import deprecated
 
 try:
+    from pyspark.errors import AnalysisException
     from pyspark.errors import ParseException as PySparkParseException
 except ImportError:
+    from pyspark.sql.utils import AnalysisException
     from pyspark.sql.utils import ParseException as PySparkParseException
 
 if TYPE_CHECKING:
@@ -413,11 +414,16 @@ def _register_udfs(self, expr: ir.Expr) -> None:
         self._session.udf.register("unwrap_json_float", unwrap_json_float)
 
     def _register_in_memory_table(self, op: ops.InMemoryTable) -> None:
-        schema = PySparkSchema.from_ibis(op.schema)
-        df = self._session.createDataFrame(data=op.data.to_frame(), schema=schema)
+        session = self._session
+        df = session.createDataFrame(
+            data=op.data.to_frame(), schema=PySparkSchema.from_ibis(op.schema)
+        )
+        name = op.name
+
         with contextlib.suppress(AnalysisException):
-            self._session.catalog.dropTempView(op.name)
-            df.createTempView(op.name)
+            session.catalog.dropTempView(name)
+
+        df.createTempView(name)
 
     def _finalize_memtable(self, name: str) -> None:
         self._session.catalog.dropTempView(name)