Skip to content

Commit 6e614d4

Browse files
committed
Implement drop catalog.database syntax
1 parent f4a39bf commit 6e614d4

File tree

7 files changed

+128
-17
lines changed

7 files changed

+128
-17
lines changed

parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1226,8 +1226,8 @@ switchDatabaseStatement
12261226
// Parses DROP DATABASE/SCHEMA with optional IF EXISTS and RESTRICT|CASCADE.
// The target is a databaseName (which, per this commit, presumably also accepts the
// qualified catalog.database form — confirm against the databaseName rule).
dropDatabaseStatement
@init { pushMsg("drop database statement", state); }
@after { popMsg(state); }
    : KW_DROP (KW_DATABASE|KW_SCHEMA) ifExists? databaseName restrictOrCascade?
    -> ^(TOK_DROPDATABASE databaseName ifExists? restrictOrCascade?)
    ;
12321232
12331233
databaseComment

ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java

Lines changed: 12 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -18,9 +18,11 @@
1818

1919
package org.apache.hadoop.hive.ql.ddl.database.drop;
2020

21+
import org.apache.commons.lang3.tuple.Pair;
2122
import org.apache.hadoop.hive.conf.HiveConf;
2223
import org.apache.hadoop.hive.metastore.api.Database;
2324
import org.apache.hadoop.hive.metastore.api.Function;
25+
import org.apache.hadoop.hive.ql.ErrorMsg;
2426
import org.apache.hadoop.hive.ql.QueryState;
2527
import org.apache.hadoop.hive.ql.exec.TaskFactory;
2628
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
@@ -51,12 +53,17 @@ public DropDatabaseAnalyzer(QueryState queryState) throws SemanticException {
5153

5254
@Override
5355
public void analyzeInternal(ASTNode root) throws SemanticException {
54-
String databaseName = unescapeIdentifier(root.getChild(0).getText());
56+
Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
5557
boolean ifExists = root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null;
5658
boolean cascade = root.getFirstChildWithType(HiveParser.TOK_CASCADE) != null;
5759
boolean isSoftDelete = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_ACID_LOCKLESS_READS_ENABLED);
5860

59-
Database database = getDatabase(databaseName, !ifExists);
61+
String catalogName = catDbNamePair.getLeft();
62+
if (getCatalog(catalogName) == null) {
63+
throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
64+
}
65+
String databaseName = catDbNamePair.getRight();
66+
Database database = getDatabase(catDbNamePair.getLeft(), catDbNamePair.getRight(), ifExists);
6067
if (database == null) {
6168
return;
6269
}
@@ -72,7 +79,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
7279
HiveConf hiveConf = new HiveConf(conf);
7380
hiveConf.set("hive.metastore.client.filter.enabled", "false");
7481
newDb = Hive.get(hiveConf);
75-
List<Table> tables = newDb.getAllTableObjects(databaseName);
82+
List<Table> tables = newDb.getAllTableObjects(catalogName, databaseName);
7683
isDbLevelLock = !isSoftDelete || tables.stream().allMatch(
7784
table -> AcidUtils.isTableSoftDeleteEnabled(table, conf));
7885
for (Table table : tables) {
@@ -85,7 +92,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
8592
outputs.add(new WriteEntity(table, lockType));
8693
}
8794
// fetch all the functions in the database
88-
List<Function> functions = db.getFunctionsInDb(databaseName, ".*");
95+
List<Function> functions = db.getFunctionsInDb(catalogName, databaseName, ".*");
8996
for (Function func: functions) {
9097
outputs.add(new WriteEntity(func, WriteEntity.WriteType.DDL_NO_LOCK));
9198
}
@@ -111,7 +118,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
111118
WriteEntity.WriteType.DDL_EXCL_WRITE : WriteEntity.WriteType.DDL_EXCLUSIVE;
112119
outputs.add(new WriteEntity(database, lockType));
113120
}
114-
DropDatabaseDesc desc = new DropDatabaseDesc(databaseName, ifExists, cascade, new ReplicationSpec());
121+
DropDatabaseDesc desc = new DropDatabaseDesc(catalogName, databaseName, ifExists, cascade, new ReplicationSpec());
115122
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
116123
}
117124

ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseDesc.java

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@
3232
public class DropDatabaseDesc implements DDLDesc, Serializable {
3333
private static final long serialVersionUID = 1L;
3434

35+
private final String catalogName;
3536
private final String databaseName;
3637
private final boolean ifExists;
3738
private final boolean cascade;
@@ -40,21 +41,27 @@ public class DropDatabaseDesc implements DDLDesc, Serializable {
4041
private boolean deleteData = true;
4142

4243
/**
 * Drop a database without specifying a catalog.
 *
 * @param databaseName name of the database to drop
 * @param ifExists when true, a missing database is not an error
 * @param replicationSpec replication scope information, may be null
 */
public DropDatabaseDesc(String databaseName, boolean ifExists, ReplicationSpec replicationSpec) {
  // A null catalog is resolved downstream to the session's current catalog
  // (see Hive.dropDatabase, which applies requireNonNullElse on the catalog name).
  this(null, databaseName, ifExists, false, replicationSpec); //TODO check the actual catalog
}
4546

46-
/**
 * Drop a database in the given catalog.
 *
 * @param catalogName catalog containing the database; null means the session's current catalog
 * @param databaseName name of the database to drop
 * @param ifExists when true, a missing database is not an error
 * @param cascade when true, drop the database even if it contains objects
 * @param replicationSpec replication scope information, may be null
 */
public DropDatabaseDesc(String catalogName, String databaseName, boolean ifExists, boolean cascade, ReplicationSpec replicationSpec) {
  this.catalogName = catalogName;
  this.databaseName = databaseName;
  this.ifExists = ifExists;
  this.cascade = cascade;
  this.replicationSpec = replicationSpec;
}
5254

53-
/**
 * Drop a database in the given catalog, controlling whether its data is deleted.
 *
 * @param catalogName catalog containing the database; null means the session's current catalog
 * @param databaseName name of the database to drop
 * @param ifExists when true, a missing database is not an error
 * @param cascade when true, drop the database even if it contains objects
 * @param deleteData when true, also delete the underlying data (overrides the default of true)
 */
public DropDatabaseDesc(String catalogName, String databaseName, boolean ifExists, boolean cascade, boolean deleteData) {
  // Delegates with a null ReplicationSpec: this overload is not used in replication scope.
  this(catalogName, databaseName, ifExists, cascade, null);
  this.deleteData = deleteData;
}
5759

60+
// Catalog containing the database to drop; null means the session's current catalog.
@Explain(displayName = "catalog", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getCatalogName() {
  return catalogName;
}
64+
5865
@Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
5966
public String getDatabaseName() {
6067
return databaseName;

ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,10 +40,11 @@ public DropDatabaseOperation(DDLOperationContext context, DropDatabaseDesc desc)
4040
@Override
4141
public int execute() throws HiveException {
4242
try {
43+
String catName = desc.getCatalogName();
4344
String dbName = desc.getDatabaseName();
4445
ReplicationSpec replicationSpec = desc.getReplicationSpec();
4546
if (replicationSpec.isInReplicationScope()) {
46-
Database database = context.getDb().getDatabase(dbName);
47+
Database database = context.getDb().getDatabase(catName, dbName);
4748
if (database == null || !replicationSpec.allowEventReplacementInto(database.getParameters())) {
4849
return 0;
4950
}

ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java

Lines changed: 81 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -725,7 +725,7 @@ public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownD
725725
*/
726726
public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb, boolean cascade)
727727
throws HiveException, NoSuchObjectException {
728-
dropDatabase(new DropDatabaseDesc(name, ignoreUnknownDb, cascade, deleteData));
728+
dropDatabase(new DropDatabaseDesc(getDefaultCatalog(conf) ,name, ignoreUnknownDb, cascade, deleteData)); //TODO check the actual catalog
729729
}
730730

731731
public void dropDatabase(DropDatabaseDesc desc)
@@ -737,7 +737,7 @@ public void dropDatabase(DropDatabaseDesc desc)
737737
.map(HiveTxnManager::getCurrentTxnId).orElse(0L);
738738

739739
DropDatabaseRequest req = new DropDatabaseRequest();
740-
req.setCatalogName(SessionState.get().getCurrentCatalog());
740+
req.setCatalogName(Objects.requireNonNullElse(desc.getCatalogName(), SessionState.get().getCurrentCatalog()));
741741
req.setName(desc.getDatabaseName());
742742
req.setIgnoreUnknownDb(desc.getIfExists());
743743
req.setDeleteData(desc.isDeleteData());
@@ -1872,6 +1872,17 @@ public List<Table> getAllTableObjects(String dbName) throws HiveException {
18721872
return getTableObjects(dbName, ".*", null);
18731873
}
18741874

1875+
/**
 * Get all table objects for the specified database in the specified catalog.
 * @param catName catalog to look the database up in
 * @param dbName database to list the tables of
 * @return List of all tables in the database, as ql Table objects
 * @throws HiveException if the underlying metastore call fails
 */
public List<Table> getAllTableObjects(String catName, String dbName) throws HiveException {
  return getTableObjects(catName, dbName, ".*", null);
}
1885+
18751886
/**
18761887
* Get all materialized view names for the specified database.
18771888
* @param dbName
@@ -1918,6 +1929,16 @@ public Table apply(org.apache.hadoop.hive.metastore.api.Table table) {
19181929
}
19191930
}
19201931

1932+
public List<Table> getTableObjects(String catName, String dbName, String pattern, TableType tableType) throws HiveException {
1933+
try {
1934+
return Lists.transform(getMSC().getTables(catName, dbName, getTablesByType(catName, dbName, pattern, tableType), null),
1935+
Table::new
1936+
);
1937+
} catch (Exception e) {
1938+
throw new HiveException(e);
1939+
}
1940+
}
1941+
19211942
/**
19221943
* Returns all existing tables from default database which match the given
19231944
* pattern. The matching occurs as per Java regular expressions
@@ -2001,6 +2022,52 @@ public List<String> getTablesByType(String dbName, String pattern, TableType typ
20012022
}
20022023
}
20032024

2025+
/**
2026+
* Returns all existing tables of a type (VIRTUAL_VIEW|EXTERNAL_TABLE|MANAGED_TABLE) from the specified
2027+
* database which match the given pattern. The matching occurs as per Java regular expressions.
2028+
* @param catName catalog name to find the tables in. if null, uses the current catalog in this session.
2029+
* @param dbName Database name to find the tables in. if null, uses the current database in this session.
2030+
* @param pattern A pattern to match for the table names.If null, returns all names from this DB.
2031+
* @param type The type of tables to return. VIRTUAL_VIEWS for views. If null, returns all tables and views.
2032+
* @return list of table names that match the pattern.
2033+
* @throws HiveException
2034+
*/
2035+
public List<String> getTablesByType(String catName, String dbName, String pattern, TableType type)
2036+
throws HiveException {
2037+
PerfLogger perfLogger = SessionState.getPerfLogger();
2038+
perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_TABLE);
2039+
2040+
if (catName == null) {
2041+
dbName = SessionState.get().getCurrentCatalog();
2042+
}
2043+
2044+
if (dbName == null) {
2045+
dbName = SessionState.get().getCurrentDatabase();
2046+
}
2047+
2048+
try {
2049+
List<String> result;
2050+
if (type != null) {
2051+
if (pattern != null) {
2052+
result = getMSC().getTables(catName, dbName, pattern, type);
2053+
} else {
2054+
result = getMSC().getTables(catName, dbName, ".*", type);
2055+
}
2056+
} else {
2057+
if (pattern != null) {
2058+
result = getMSC().getTables(catName, dbName, pattern);
2059+
} else {
2060+
result = getMSC().getTables(catName, dbName, ".*");
2061+
}
2062+
}
2063+
return result;
2064+
} catch (Exception e) {
2065+
throw new HiveException(e);
2066+
} finally {
2067+
perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.HIVE_GET_TABLE, "HS2-cache");
2068+
}
2069+
}
2070+
20042071
/**
20052072
* Get the materialized views that have been enabled for rewriting from the
20062073
* cache (registry). It will preprocess them to discard those that are
@@ -6467,6 +6534,18 @@ public List<Function> getFunctionsInDb(String dbName, String pattern) throws Hiv
64676534
}
64686535
}
64696536

6537+
public List<Function> getFunctionsInDb(String catName, String dbName, String pattern) throws HiveException {
6538+
try {
6539+
GetFunctionsRequest request = new GetFunctionsRequest(dbName);
6540+
request.setPattern(pattern);
6541+
request.setCatalogName(Objects.requireNonNullElse(catName, SessionState.get().getCurrentCatalog()));
6542+
request.setReturnNames(false);
6543+
return getMSC().getFunctionsRequest(request).getFunctions();
6544+
} catch (TException te) {
6545+
throw new HiveException(te);
6546+
}
6547+
}
6548+
64706549
public void setMetaConf(String propName, String propValue) throws HiveException {
64716550
try {
64726551
getMSC().setMetaConf(propName, propValue);

ql/src/test/queries/clientpositive/catalog_database.q

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,3 @@
1-
set hive.mapred.mode=nonstrict;
2-
set hive.support.concurrency = true;
3-
41
-- CREATE DATABASE in default catalog 'hive'
52
CREATE DATABASE testdb;
63

@@ -26,3 +23,10 @@ SHOW DATABASES;
2623

2724
-- Switch database by catalog.db pattern
2825
USE testcat.testdb_new;
26+
27+
-- Drop database by catalog.db pattern
28+
DROP DATABASE testcat.testdb_new;
29+
30+
-- Check databases after the drop in catalog 'testcat';
31+
-- the list of databases in the catalog 'hive' should only contain the default.
32+
SHOW DATABASES;

ql/src/test/results/clientpositive/llap/catalog_database.q.out

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,3 +48,16 @@ PREHOOK: Input: database:testdb_new
4848
POSTHOOK: query: USE testcat.testdb_new
4949
POSTHOOK: type: SWITCHDATABASE
5050
POSTHOOK: Input: database:testdb_new
51+
PREHOOK: query: DROP DATABASE testcat.testdb_new
52+
PREHOOK: type: DROPDATABASE
53+
PREHOOK: Input: database:testdb_new
54+
PREHOOK: Output: database:testdb_new
55+
POSTHOOK: query: DROP DATABASE testcat.testdb_new
56+
POSTHOOK: type: DROPDATABASE
57+
POSTHOOK: Input: database:testdb_new
58+
POSTHOOK: Output: database:testdb_new
59+
PREHOOK: query: SHOW DATABASES
60+
PREHOOK: type: SHOWDATABASES
61+
POSTHOOK: query: SHOW DATABASES
62+
POSTHOOK: type: SHOWDATABASES
63+
default

0 commit comments

Comments
 (0)