diff --git a/connectors/spark/src/main/scala/io/unitycatalog/connectors/spark/UCSingleCatalog.scala b/connectors/spark/src/main/scala/io/unitycatalog/connectors/spark/UCSingleCatalog.scala index 9b7461c79..2f48c6b32 100644 --- a/connectors/spark/src/main/scala/io/unitycatalog/connectors/spark/UCSingleCatalog.scala +++ b/connectors/spark/src/main/scala/io/unitycatalog/connectors/spark/UCSingleCatalog.scala @@ -2,7 +2,7 @@ package io.unitycatalog.connectors.spark import io.unitycatalog.client.{ApiClient, ApiException} import io.unitycatalog.client.api.{TablesApi, TemporaryTableCredentialsApi} -import io.unitycatalog.client.model.{AwsCredentials, GenerateTemporaryTableCredential, TableOperation, TableType} +import io.unitycatalog.client.model.{AwsCredentials, GenerateTemporaryTableCredential, ListTablesResponse, TableOperation, TableType} import java.net.URI import java.util @@ -14,6 +14,7 @@ import org.apache.spark.sql.connector.expressions.Transform import org.apache.spark.sql.types.{DataType, StructField, StructType} import org.apache.spark.sql.util.CaseInsensitiveStringMap +import scala.collection.convert.ImplicitConversions._ import scala.jdk.CollectionConverters._ /** @@ -33,7 +34,7 @@ class UCSingleCatalog extends TableCatalog { override def name(): String = deltaCatalog.name() - override def listTables(namespace: Array[String]): Array[Identifier] = ??? + override def listTables(namespace: Array[String]): Array[Identifier] = deltaCatalog.listTables(namespace) override def loadTable(ident: Identifier): Table = deltaCatalog.loadTable(ident) @@ -91,7 +92,18 @@ private class UCProxy extends TableCatalog { this.name } - override def listTables(namespace: Array[String]): Array[Identifier] = ??? 
+ override def listTables(namespace: Array[String]): Array[Identifier] = { + if (namespace.length > 1) { + throw new ApiException("Nested namespaces are not supported: " + namespace.mkString(".")) + } + val catalogName = this.name + val schemaName = namespace.head + val maxResults = 0 + val pageToken = null + val response: ListTablesResponse = tablesApi.listTables(catalogName, schemaName, maxResults, pageToken) + response.getTables.asScala.map(table => Identifier.of(namespace, table.getName)).toArray + } + override def loadTable(ident: Identifier): Table = { val t = try { diff --git a/connectors/spark/src/test/java/io/unitycatalog/connectors/spark/SparkIntegrationTest.java b/connectors/spark/src/test/java/io/unitycatalog/connectors/spark/SparkIntegrationTest.java index cf10198d0..b16e5def8 100644 --- a/connectors/spark/src/test/java/io/unitycatalog/connectors/spark/SparkIntegrationTest.java +++ b/connectors/spark/src/test/java/io/unitycatalog/connectors/spark/SparkIntegrationTest.java @@ -1,8 +1,7 @@ package io.unitycatalog.connectors.spark; import static io.unitycatalog.server.utils.TestUtils.*; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; import io.unitycatalog.client.ApiException; import io.unitycatalog.client.model.*; @@ -19,6 +18,7 @@ import java.io.IOException; import java.util.*; import org.apache.spark.network.util.JavaUtils; +import org.apache.spark.sql.AnalysisException; import org.apache.spark.sql.Row; import org.apache.spark.sql.SparkSession; import org.junit.jupiter.api.BeforeEach; @@ -143,6 +143,24 @@ public void testCredentialDelta() throws ApiException, IOException { session.stop(); } + @Test + public void testShowTables() throws ApiException, IOException { + createCommonResources(); + SparkSession session = createSparkSessionWithCatalogs(SPARK_CATALOG); + setupExternalParquetTable(PARQUET_TABLE, new ArrayList<>(0)); + + 
Row[] tables = (Row[]) session.sql("SHOW TABLES in " + SCHEMA_NAME).collect(); + assertEquals(1, tables.length); + assertEquals(SCHEMA_NAME, tables[0].getString(0)); + assertEquals(PARQUET_TABLE, tables[0].getString(1)); + + AnalysisException exception = + assertThrows(AnalysisException.class, () -> session.sql("SHOW TABLES in a.b.c").collect()); + assertTrue(exception.getMessage().contains("a.b.c")); + + session.stop(); + } + private String generateTableLocation(String catalogName, String tableName) throws IOException { return new File(new File(dataDir, catalogName), tableName).getCanonicalPath(); }