diff --git a/connectors/spark/src/main/scala/io/unitycatalog/connectors/spark/UCSingleCatalog.scala b/connectors/spark/src/main/scala/io/unitycatalog/connectors/spark/UCSingleCatalog.scala
index 2f48c6b32..ec470d30c 100644
--- a/connectors/spark/src/main/scala/io/unitycatalog/connectors/spark/UCSingleCatalog.scala
+++ b/connectors/spark/src/main/scala/io/unitycatalog/connectors/spark/UCSingleCatalog.scala
@@ -55,7 +55,7 @@ class UCSingleCatalog extends TableCatalog {
   }
   override def alterTable(ident: Identifier, changes: TableChange*): Table = ???
 
-  override def dropTable(ident: Identifier): Boolean = ???
+  override def dropTable(ident: Identifier): Boolean = deltaCatalog.dropTable(ident)
 
   override def renameTable(oldIdent: Identifier, newIdent: Identifier): Unit = ???
 }
@@ -93,9 +93,8 @@ private class UCProxy extends TableCatalog {
   }
 
   override def listTables(namespace: Array[String]): Array[Identifier] = {
-    if (namespace.length > 1) {
-      throw new ApiException("Nested namespaces are not supported: " + namespace.mkString("."))
-    }
+    checkUnsupportedNestedNamespace(namespace)
+
     val catalogName = this.name
     val schemaName = namespace.head
     val maxResults = 0
@@ -174,8 +173,18 @@
 
   override def alterTable(ident: Identifier, changes: TableChange*): Table = ???
 
-  override def dropTable(ident: Identifier): Boolean = ???
+  override def dropTable(ident: Identifier): Boolean = {
+    checkUnsupportedNestedNamespace(ident.namespace())
+    val ret =
+      tablesApi.deleteTable(Seq(this.name, ident.namespace()(0), ident.name()).mkString("."))
+    if (ret == 200) true else false
+  }
 
   override def renameTable(oldIdent: Identifier, newIdent: Identifier): Unit = ???
 
+  private def checkUnsupportedNestedNamespace(namespace: Array[String]): Unit = {
+    if (namespace.length > 1) {
+      throw new ApiException("Nested namespaces are not supported: " + namespace.mkString("."))
+    }
+  }
 }
diff --git a/connectors/spark/src/test/java/io/unitycatalog/connectors/spark/SparkIntegrationTest.java b/connectors/spark/src/test/java/io/unitycatalog/connectors/spark/SparkIntegrationTest.java
index b16e5def8..2d96b1214 100644
--- a/connectors/spark/src/test/java/io/unitycatalog/connectors/spark/SparkIntegrationTest.java
+++ b/connectors/spark/src/test/java/io/unitycatalog/connectors/spark/SparkIntegrationTest.java
@@ -161,6 +161,20 @@ public void testShowTables() throws ApiException, IOException {
     session.stop();
   }
 
+  @Test
+  public void testDropTable() throws ApiException, IOException {
+    createCommonResources();
+    SparkSession session = createSparkSessionWithCatalogs(SPARK_CATALOG);
+    setupExternalParquetTable(PARQUET_TABLE, new ArrayList<>(0));
+    String fullName = String.join(".", SPARK_CATALOG, SCHEMA_NAME, PARQUET_TABLE);
+    assertTrue(session.catalog().tableExists(fullName));
+    session.sql("DROP TABLE " + fullName).collect();
+    assertFalse(session.catalog().tableExists(fullName));
+    AnalysisException exception =
+        assertThrows(AnalysisException.class, () -> session.sql("DROP TABLE a.b.c.d").collect());
+    session.stop();
+  }
+
   private String generateTableLocation(String catalogName, String tableName) throws IOException {
     return new File(new File(dataDir, catalogName), tableName).getCanonicalPath();
   }