
Commit f4a39bf

HIVE-29177: Implement default Catalog selection
1 parent d90574c commit f4a39bf

17 files changed (+324, -24 lines)

common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java

Lines changed: 1 addition & 1 deletion

@@ -468,7 +468,7 @@ public enum ErrorMsg {
   RESOURCE_PLAN_ALREADY_EXISTS(10417, "Resource plan {0} already exists", true),
   RESOURCE_PLAN_NOT_EXISTS(10418, "Resource plan {0} does not exist", true),
   INCOMPATIBLE_STRUCT(10419, "Incompatible structs.", true),
-  OBJECTNAME_CONTAINS_DOT(10420, "Table or database name may not contain dot(.) character", true),
+  OBJECTNAME_CONTAINS_DOT(10420, "Catalog or table or database name may not contain dot(.) character", true),
   WITHIN_GROUP_NOT_ALLOWED(10421,
       "Not an ordered-set aggregate function: {0}. WITHIN GROUP clause is not allowed.", true),
   WITHIN_GROUP_PARAMETER_MISMATCH(10422,

parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g

Lines changed: 11 additions & 0 deletions

@@ -231,6 +231,17 @@ uniqueJoinTableSource
     -> ^(TOK_TABREF $tabname $ts? $alias?)
     ;

+databaseName
+@init { gParent.pushMsg("database name", state); }
+@after { gParent.popMsg(state); }
+    :
+    catalog=identifier DOT db=identifier?
+    -> ^(TOK_DBNAME $catalog $db)
+    |
+    db=identifier
+    -> ^(TOK_DBNAME $db)
+    ;
+
 tableName
 @init { gParent.pushMsg("table name", state); }
 @after { gParent.popMsg(state); }

parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g

Lines changed: 11 additions & 2 deletions

@@ -378,6 +378,7 @@ TOK_DESCCATALOG;
 TOK_CATALOGLOCATION;
 TOK_CATALOGCOMMENT;
 TOK_ALTERCATALOG_LOCATION;
+TOK_SWITCHCATALOG;
 TOK_DESCDATABASE;
 TOK_DATABASEPROPERTIES;
 TOK_DATABASELOCATION;
@@ -1011,6 +1012,7 @@ ddlStatement
 @after { popMsg(state); }
     : createCatalogStatement
     | dropCatalogStatement
+    | switchCatalogStatement
     | createDatabaseStatement
     | switchDatabaseStatement
     | dropDatabaseStatement
@@ -1151,6 +1153,13 @@ dropCatalogStatement
     -> ^(TOK_DROPCATALOG identifier ifExists?)
     ;

+switchCatalogStatement
+@init { pushMsg("switch catalog statement", state); }
+@after { popMsg(state); }
+    : KW_SET KW_CATALOG identifier
+    -> ^(TOK_SWITCHCATALOG identifier)
+    ;
+
 createDatabaseStatement
 @init { pushMsg("create database statement", state); }
 @after { popMsg(state); }
@@ -1210,8 +1219,8 @@ dbConnectorName
 switchDatabaseStatement
 @init { pushMsg("switch database statement", state); }
 @after { popMsg(state); }
-    : KW_USE identifier
-    -> ^(TOK_SWITCHDATABASE identifier)
+    : KW_USE databaseName
+    -> ^(TOK_SWITCHDATABASE databaseName)
     ;

 dropDatabaseStatement
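Together with the databaseName rule added to FromClauseParser.g above, these grammar changes let the parser accept SET CATALOG <catalog> and USE <catalog>.<database> in addition to the existing USE <database>. Below is a minimal sketch (not part of this diff) of the intended session-level effect of SET CATALOG, assuming a started SessionState and the setCurrentCatalog/getCurrentCatalog accessors referenced elsewhere in this commit; the catalog name "spark" is illustrative.

import org.apache.hadoop.hive.ql.session.SessionState;

// Sketch only, not part of the commit.
public final class SetCatalogSketch {
  private SetCatalogSketch() {
  }

  // "SET CATALOG spark" parses to TOK_SWITCHCATALOG and, once executed, records the
  // catalog on the current session (see SwitchCatalogOperation further down).
  static void afterSetCatalog() {
    SessionState.get().setCurrentCatalog("spark");
    assert "spark".equals(SessionState.get().getCurrentCatalog());
  }
}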

ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java

Lines changed: 53 additions & 0 deletions

@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.use;
+
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for catalog switching commands.
+ */
+@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_SWITCHCATALOG)
+public class SwitchCatalogAnalyzer extends BaseSemanticAnalyzer {
+  public SwitchCatalogAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String catalogName = unescapeIdentifier(root.getChild(0).getText());
+
+    Catalog catalog = getCatalog(catalogName);
+    ReadEntity readEntity = new ReadEntity(catalog);
+    readEntity.noLockNeeded();
+    inputs.add(readEntity);
+
+    SwitchCatalogDesc desc = new SwitchCatalogDesc(catalogName);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+  }
+}

ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogDesc.java

Lines changed: 43 additions & 0 deletions

@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.use;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+
+import java.io.Serializable;
+
+/**
+ * DDL task description for SET CATALOG commands.
+ */
+@Explain(displayName = "Switch Catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
+public class SwitchCatalogDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String catalogName;
+
+  public SwitchCatalogDesc(String catalogName) {
+    this.catalogName = catalogName;
+  }
+
+  @Explain(displayName = "name", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
+  public String getCatalogName() {
+    return catalogName;
+  }
+}

ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogOperation.java

Lines changed: 50 additions & 0 deletions

@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.use;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+/**
+ * Operation process of switching to another catalog.
+ */
+public class SwitchCatalogOperation extends DDLOperation<SwitchCatalogDesc> {
+  public SwitchCatalogOperation(DDLOperationContext context, SwitchCatalogDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    String catalogName = desc.getCatalogName();
+    if (context.getDb().getCatalog(catalogName) == null) {
+      throw new HiveException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
+    }
+
+    SessionState.get().setCurrentCatalog(catalogName);
+    return 0;
+  }
+}
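Read together, SwitchCatalogAnalyzer, SwitchCatalogDesc and SwitchCatalogOperation form the complete SET CATALOG path: parse tree to desc to session state. Here is a minimal sketch (not part of this diff) of that net effect, again assuming a started SessionState.

package org.apache.hadoop.hive.ql.ddl.catalog.use;

import org.apache.hadoop.hive.ql.session.SessionState;

// Sketch only, not part of the commit.
public final class SwitchCatalogFlowSketch {
  private SwitchCatalogFlowSketch() {
  }

  static void simulateSetCatalog(String catalogName) {
    // SwitchCatalogAnalyzer wraps the parsed identifier in a desc carried by DDLWork ...
    SwitchCatalogDesc desc = new SwitchCatalogDesc(catalogName);
    // ... and SwitchCatalogOperation#execute, after verifying the catalog exists in the
    // metastore, makes it the session's current catalog.
    SessionState.get().setCurrentCatalog(desc.getCatalogName());
  }
}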

ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java

Lines changed: 2 additions & 2 deletions

@@ -55,12 +55,12 @@ public int execute() throws HiveException {
       if (desc.getManagedLocationUri() != null) {
         database.setManagedLocationUri(desc.getManagedLocationUri());
       }
-      makeLocationQualified(database);
+      makeLocationQualified(database); // TODO add catalog prefix for db location
       if (database.getLocationUri().equalsIgnoreCase(database.getManagedLocationUri())) {
         throw new HiveException("Managed and external locations for database cannot be the same");
       }
     } else if (desc.getDatabaseType() == DatabaseType.REMOTE) {
-      makeLocationQualified(database);
+      makeLocationQualified(database); // TODO add catalog prefix for db location
       database.setConnector_name(desc.getConnectorName());
       database.setRemote_dbname(desc.getRemoteDbName());
     } else { // should never be here

ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java

Lines changed: 2 additions & 2 deletions

@@ -52,12 +52,12 @@ public int execute() throws HiveException {

       if (LlapHiveUtils.isLlapMode(context.getConf())) {
         ProactiveEviction.Request.Builder llapEvictRequestBuilder = ProactiveEviction.Request.Builder.create();
-        llapEvictRequestBuilder.addDb(dbName);
+        llapEvictRequestBuilder.addDb(dbName); // TODO add catalog for the cache
         ProactiveEviction.evict(context.getConf(), llapEvictRequestBuilder.build());
       }
       // Unregister the functions as well
       if (desc.isCasdade()) {
-        FunctionRegistry.unregisterPermanentFunctions(dbName);
+        FunctionRegistry.unregisterPermanentFunctions(dbName); // TODO add catalog for the cache
       }
     } catch (NoSuchObjectException ex) {
       throw new HiveException(ex, ErrorMsg.DATABASE_NOT_EXISTS, desc.getDatabaseName());

ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java

Lines changed: 4 additions & 4 deletions

@@ -18,6 +18,7 @@

 package org.apache.hadoop.hive.ql.ddl.database.use;

+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
@@ -40,14 +41,13 @@ public SwitchDatabaseAnalyzer(QueryState queryState) throws SemanticException {

   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String databaseName = unescapeIdentifier(root.getChild(0).getText());
-
-    Database database = getDatabase(databaseName, true);
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    Database database = getDatabase(catDbNamePair.getLeft(), catDbNamePair.getRight(), true);
     ReadEntity readEntity = new ReadEntity(database);
     readEntity.noLockNeeded();
     inputs.add(readEntity);

-    SwitchDatabaseDesc desc = new SwitchDatabaseDesc(databaseName);
+    SwitchDatabaseDesc desc = new SwitchDatabaseDesc(catDbNamePair.getRight());
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 }

ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java

Lines changed: 12 additions & 8 deletions

@@ -222,6 +222,7 @@
 import java.util.List;
 import java.util.Map.Entry;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.Callable;
@@ -667,6 +668,7 @@ public void dropCatalog(String catName, boolean ignoreUnknownCat)
   public void createDatabase(Database db, boolean ifNotExist)
       throws AlreadyExistsException, HiveException {
     try {
+      db.setCatalogName(SessionState.get().getCurrentCatalog());
       getMSC().createDatabase(db);
     } catch (AlreadyExistsException e) {
       if (!ifNotExist) {
@@ -735,7 +737,7 @@ public void dropDatabase(DropDatabaseDesc desc)
         .map(HiveTxnManager::getCurrentTxnId).orElse(0L);

     DropDatabaseRequest req = new DropDatabaseRequest();
-    req.setCatalogName(getDefaultCatalog(conf));
+    req.setCatalogName(SessionState.get().getCurrentCatalog());
     req.setName(desc.getDatabaseName());
     req.setIgnoreUnknownDb(desc.getIfExists());
     req.setDeleteData(desc.isDeleteData());
@@ -1428,7 +1430,8 @@ public void createTable(Table tbl, boolean ifNotExists,
   }

   public void createTable(Table tbl, boolean ifNotExists) throws HiveException {
-    createTable(tbl, ifNotExists, null, null, null, null,
+    tbl.setCatalogName(Objects.requireNonNullElse(tbl.getCatName(), SessionState.get().getCurrentCatalog()));
+    createTable(tbl, ifNotExists, null, null, null, null,
         null, null);
   }

@@ -1463,6 +1466,7 @@ public void dropTable(Table table, boolean ifPurge) throws HiveException {
     long txnId = Optional.ofNullable(SessionState.get())
         .map(ss -> ss.getTxnMgr().getCurrentTxnId()).orElse(0L);
     table.getTTable().setTxnId(txnId);
+    table.setCatalogName(Objects.requireNonNullElse(table.getCatName(), SessionState.get().getCurrentCatalog()));

     dropTable(table.getTTable(), !tableWithSuffix, true, ifPurge);
   }
@@ -1978,15 +1982,15 @@ public List<String> getTablesByType(String dbName, String pattern, TableType typ
     List<String> result;
     if (type != null) {
       if (pattern != null) {
-        result = getMSC().getTables(dbName, pattern, type);
+        result = getMSC().getTables(SessionState.get().getCurrentCatalog(), dbName, pattern, type);
       } else {
-        result = getMSC().getTables(dbName, ".*", type);
+        result = getMSC().getTables(SessionState.get().getCurrentCatalog(), dbName, ".*", type);
       }
     } else {
       if (pattern != null) {
-        result = getMSC().getTables(dbName, pattern);
+        result = getMSC().getTables(SessionState.get().getCurrentCatalog(), dbName, pattern);
       } else {
-        result = getMSC().getTables(dbName, ".*");
+        result = getMSC().getTables(SessionState.get().getCurrentCatalog(), dbName, ".*");
       }
     }
     return result;
@@ -2445,7 +2449,7 @@ public List<HiveRelOptMaterialization> getMaterializedViewsByAST(
    */
   public List<String> getAllDatabases() throws HiveException {
     try {
-      return getMSC().getAllDatabases();
+      return getMSC().getAllDatabases(SessionState.get().getCurrentCatalog());
     } catch (Exception e) {
       throw new HiveException(e);
     }
@@ -2551,7 +2555,7 @@ public Database getDatabase(String dbName) throws HiveException {
     PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_DATABASE);
     try {
-      return getMSC().getDatabase(dbName);
+      return getMSC().getDatabase(SessionState.get().getCurrentCatalog(), dbName);
     } catch (NoSuchObjectException e) {
       return null;
     } catch (Exception e) {