Skip to content

Commit 79ec7f3

Browse files
authored
HIVE-28782: Ensure Hive constructs URIs with IPv6 literals safely. (#5758) (Dmitriy Fingerman, reviewed by Ayush Saxena)
* HIVE-28782: Ensure Hive constructs URIs with IPv6 literals safely.
1 parent b7a925b commit 79ec7f3

File tree

40 files changed

+147
-99
lines changed

40 files changed

+147
-99
lines changed

beeline/src/java/org/apache/hive/beeline/hs2connection/HiveSiteHS2ConnectionFileParser.java

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
import org.apache.hadoop.hive.common.ServerUtils;
2727
import org.apache.hadoop.hive.conf.HiveConf;
2828
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
29+
import org.apache.hadoop.hive.common.IPStackUtils;
2930
import org.slf4j.Logger;
3031
import org.slf4j.LoggerFactory;
3132

@@ -119,8 +120,7 @@ private void addHosts(Properties props) throws BeelineHS2ConnectionFileParseExce
119120
}
120121
}
121122

122-
private void addZKServiceDiscoveryHosts(Properties props)
123-
throws BeelineHS2ConnectionFileParseException {
123+
private void addZKServiceDiscoveryHosts(Properties props) {
124124
props.setProperty("serviceDiscoveryMode", "zooKeeper");
125125
props.setProperty("zooKeeperNamespace",
126126
HiveConf.getVar(conf, ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE));
@@ -141,7 +141,9 @@ private void addDefaultHS2Hosts(Properties props) throws BeelineHS2ConnectionFil
141141
}
142142
int portNum = getPortNum(
143143
"http".equalsIgnoreCase(HiveConf.getVar(conf, ConfVars.HIVE_SERVER2_TRANSPORT_MODE)));
144-
props.setProperty("hosts", serverIPAddress.getHostName() + ":" + portNum);
144+
// The hosts property is used in constructing the connection URL; serverIPAddress.getHostName() might return an
145+
// IP address depending on the configuration, hence need to properly escape a possible IPv6 literal
146+
props.setProperty("hosts", IPStackUtils.concatHostPort(serverIPAddress.getHostName(), portNum));
145147
}
146148

147149
private int getPortNum(boolean isHttp) {

beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -26,10 +26,10 @@
2626
import java.sql.Statement;
2727

2828
import org.apache.commons.io.FileUtils;
29+
import org.apache.hadoop.hive.common.IPStackUtils;
2930
import org.apache.hive.jdbc.HiveConnection;
3031
import org.apache.hive.service.auth.HiveAuthConstants;
3132
import org.apache.hive.service.cli.session.SessionUtils;
32-
import org.apache.hive.beeline.BeeLine;
3333
import org.apache.hadoop.hive.shims.Utils;
3434

3535
/**
@@ -84,7 +84,7 @@ public static void main(String[] args) throws Exception {
8484
/*
8585
* Connect via kerberos and get delegation token
8686
*/
87-
url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" + serverPrincipal;
87+
url = String.format("jdbc:hive2://%s/default;principal=%s", IPStackUtils.concatHostPort(host, port), serverPrincipal);
8888
con = DriverManager.getConnection(url);
8989
System.out.println("Connected successfully to " + url);
9090
// get delegation token for the given proxy user
@@ -98,31 +98,31 @@ public static void main(String[] args) throws Exception {
9898
System.setProperty(BEELINE_EXIT, "true");
9999

100100
// connect using principal via Beeline with inputStream
101-
url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" + serverPrincipal;
101+
url = String.format("jdbc:hive2://%s/default;principal=%s", IPStackUtils.concatHostPort(host, port), serverPrincipal);
102102
currentResultFile = generateSQL(null);
103103
beeLineArgs = new String[] { "-u", url, "-n", "foo", "-p", "bar"};
104104
System.out.println("Connection with kerberos, user/password via args, using input rediction");
105105
BeeLine.mainWithInputRedirection(beeLineArgs, inpStream);
106106
compareResults( currentResultFile);
107107

108108
// connect using principal via Beeline with inputStream
109-
url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" + serverPrincipal;
109+
url = String.format("jdbc:hive2://%s/default;principal=%s", IPStackUtils.concatHostPort(host, port), serverPrincipal);
110110
currentResultFile = generateSQL(null);
111111
beeLineArgs = new String[] { "-u", url, "-n", "foo", "-p", "bar", "-f" , scriptFileName};
112112
System.out.println("Connection with kerberos, user/password via args, using input script");
113113
BeeLine.main(beeLineArgs);
114114
compareResults( currentResultFile);
115115

116116
// connect using principal via Beeline with inputStream
117-
url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" + serverPrincipal;
117+
url = String.format("jdbc:hive2://%s/default;principal=%s", IPStackUtils.concatHostPort(host, port), serverPrincipal);
118118
currentResultFile = generateSQL(url+ " foo bar ");
119119
beeLineArgs = new String[] { "-u", url, "-f" , scriptFileName};
120120
System.out.println("Connection with kerberos, user/password via connect, using input script");
121121
BeeLine.main(beeLineArgs);
122122
compareResults( currentResultFile);
123123

124124
// connect using principal via Beeline with inputStream
125-
url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" + serverPrincipal;
125+
url = String.format("jdbc:hive2://%s/default;principal=%s", IPStackUtils.concatHostPort(host, port), serverPrincipal);
126126
currentResultFile = generateSQL(url+ " foo bar ");
127127
beeLineArgs = new String[] { "-u", url, "-f" , scriptFileName};
128128
System.out.println("Connection with kerberos, user/password via connect, using input redirect");
@@ -134,22 +134,22 @@ public static void main(String[] args) throws Exception {
134134
*/
135135
System.out.println("Store token into ugi and try");
136136
storeTokenInJobConf(token);
137-
url = "jdbc:hive2://" + host + ":" + port + "/default;auth=delegationToken";
137+
url = String.format("jdbc:hive2://%s/default;auth=delegationToken", IPStackUtils.concatHostPort(host, port));
138138
con = DriverManager.getConnection(url);
139139
System.out.println("Connecting to " + url);
140140
runTest();
141141
con.close();
142142

143143
// connect using token via Beeline with inputStream
144-
url = "jdbc:hive2://" + host + ":" + port + "/default";
144+
url = String.format("jdbc:hive2://%s/default", IPStackUtils.concatHostPort(host, port));
145145
currentResultFile = generateSQL(null);
146146
beeLineArgs = new String[] { "-u", url, "-n", "foo", "-p", "bar", "-a", "delegationToken" };
147147
System.out.println("Connection with token, user/password via args, using input redirection");
148148
BeeLine.mainWithInputRedirection(beeLineArgs, inpStream);
149149
compareResults( currentResultFile);
150150

151151
// connect using token via Beeline using script
152-
url = "jdbc:hive2://" + host + ":" + port + "/default";
152+
url = String.format("jdbc:hive2://%s/default", IPStackUtils.concatHostPort(host, port));
153153
currentResultFile = generateSQL(null);
154154
beeLineArgs = new String[] { "-u", url, "-n", "foo", "-p", "bar", "-a", "delegationToken",
155155
"-f", scriptFileName};
@@ -158,15 +158,15 @@ public static void main(String[] args) throws Exception {
158158
compareResults( currentResultFile);
159159

160160
// connect using token via Beeline using script
161-
url = "jdbc:hive2://" + host + ":" + port + "/default";
161+
url = String.format("jdbc:hive2://%s/default", IPStackUtils.concatHostPort(host, port));
162162
currentResultFile = generateSQL(url + " foo bar ");
163163
beeLineArgs = new String [] {"-a", "delegationToken", "-f", scriptFileName};
164164
System.out.println("Connection with token, user/password via connect, using input script");
165165
BeeLine.main(beeLineArgs);
166166
compareResults( currentResultFile);
167167

168168
// connect using token via Beeline using script
169-
url = "jdbc:hive2://" + host + ":" + port + "/default";
169+
url = String.format("jdbc:hive2://%s/default", IPStackUtils.concatHostPort(host, port));
170170
currentResultFile = generateSQL(url + " foo bar ");
171171
System.out.println("Connection with token, user/password via connect, using input script");
172172
beeLineArgs = new String [] {"-f", scriptFileName, "-a", "delegationToken"};
@@ -176,8 +176,8 @@ public static void main(String[] args) throws Exception {
176176
/*
177177
* Connect via kerberos with trusted proxy user
178178
*/
179-
url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" + serverPrincipal
180-
+ ";hive.server2.proxy.user=" + proxyUser;
179+
url = String.format("jdbc:hive2://%s/default;principal=%s;hive.server2.proxy.user=%s",
180+
IPStackUtils.concatHostPort(host, port), serverPrincipal, proxyUser);
181181
con = DriverManager.getConnection(url);
182182
System.out.println("Connected successfully to " + url);
183183
runTest();
@@ -191,7 +191,7 @@ public static void main(String[] args) throws Exception {
191191

192192
/* verify the connection fails after canceling the token */
193193
try {
194-
url = "jdbc:hive2://" + host + ":" + port + "/default;auth=delegationToken";
194+
url = String.format("jdbc:hive2://%s/default;auth=delegationToken", IPStackUtils.concatHostPort(host, port));
195195
con = DriverManager.getConnection(url);
196196
throw new Exception ("connection should have failed after token cancellation");
197197
} catch (SQLException e) {

common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@
3333

3434
import com.google.common.base.Splitter;
3535
import com.google.common.collect.Sets;
36-
import org.apache.hive.common.IPStackUtils;
36+
import org.apache.hadoop.hive.common.IPStackUtils;
3737
import org.apache.thrift.transport.TSSLTransportFactory;
3838
import org.apache.thrift.transport.TSSLTransportFactory.TSSLTransportParameters;
3939
import org.apache.thrift.transport.TServerSocket;

contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
2929
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
3030
import org.apache.hadoop.io.Text;
31-
import org.apache.hive.common.IPStackUtils;
31+
import org.apache.hadoop.hive.common.IPStackUtils;
3232
import static org.junit.Assert.assertEquals;
3333
import org.junit.Test;
3434

hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@
7575
import org.apache.hadoop.mapred.OutputFormat;
7676
import org.apache.hadoop.mapreduce.Job;
7777
import org.apache.hadoop.util.StringUtils;
78-
import org.apache.hive.common.IPStackUtils;
78+
import org.apache.hadoop.hive.common.IPStackUtils;
7979
import org.slf4j.Logger;
8080
import org.slf4j.LoggerFactory;
8181

hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929
import org.apache.hadoop.hive.metastore.api.Table;
3030
import org.apache.hadoop.hive.ql.plan.TableDesc;
3131
import org.apache.hadoop.mapred.JobConf;
32-
import org.apache.hive.common.IPStackUtils;
32+
import org.apache.hadoop.hive.common.IPStackUtils;
3333
import org.junit.Assert;
3434
import org.junit.Test;
3535
import org.mockito.Mockito;

hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@
3434
import org.apache.hadoop.hdfs.MiniDFSCluster;
3535
import org.apache.hadoop.mapred.JobConf;
3636
import org.apache.hadoop.mapred.MiniMRCluster;
37-
import org.apache.hive.common.IPStackUtils;
37+
import org.apache.hadoop.hive.common.IPStackUtils;
3838

3939
/**
4040
* This class builds a single instance of itself with the Singleton

hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@
3434

3535
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
3636
import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
37-
import org.apache.hive.common.IPStackUtils;
37+
import org.apache.hadoop.hive.common.IPStackUtils;
3838
import org.slf4j.Logger;
3939
import org.slf4j.LoggerFactory;
4040
import org.apache.commons.lang3.StringUtils;
@@ -72,7 +72,6 @@
7272
import javax.servlet.ServletException;
7373
import javax.servlet.ServletRequest;
7474
import javax.servlet.ServletResponse;
75-
import javax.servlet.annotation.WebFilter;
7675
import javax.servlet.http.HttpServletRequest;
7776
import javax.servlet.http.HttpServletResponse;
7877

hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626
import org.apache.hadoop.fs.FileSystem;
2727
import org.apache.hadoop.hive.shims.HadoopShimsSecure;
2828
import org.apache.hadoop.util.StringUtils;
29-
import org.apache.hive.common.IPStackUtils;
29+
import org.apache.hadoop.hive.common.IPStackUtils;
3030
import org.junit.After;
3131
import org.junit.Assert;
3232
import org.junit.Before;

iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,12 +23,12 @@
2323
import java.util.List;
2424
import java.util.stream.Collectors;
2525
import org.apache.hadoop.conf.Configuration;
26+
import org.apache.hadoop.hive.common.IPStackUtils;
2627
import org.apache.hadoop.hive.conf.HiveConf;
2728
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
2829
import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
2930
import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
3031
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
31-
import org.apache.hive.common.IPStackUtils;
3232
import org.apache.hive.service.cli.CLIService;
3333
import org.apache.hive.service.cli.HiveSQLException;
3434
import org.apache.hive.service.cli.OperationHandle;

0 commit comments

Comments
 (0)