diff --git a/tools/hive/README.md b/tools/hive/README.md
new file mode 100644
index 00000000..97d816f1
--- /dev/null
+++ b/tools/hive/README.md
@@ -0,0 +1,61 @@
+# Cloudera Hadoop and Hive Docker Image with Kerberos
+
+This is a Docker image that runs the CDH5 versions of Hadoop and Hive in a single container. A separate Kerberos image configures Hadoop and Hive to authenticate via Kerberos. Adapted from https://github.com/tilakpatidar/cdh5_hive_postgres and based on Ubuntu 16.04.
+
+Postgres is also installed so that Hive can use it as its metastore backend and run the metastore in remote mode.
+
+## Current versions
+* Hadoop 2.6.0
+* Hive 1.1.0
+
+## Dependencies
+The Kerberos image assumes that a KDC has been launched by the dcos-commons `kdc.py` script.
+
+## Build the images
+
+### Build the Hadoop + Hive image
+```
+cd hadoop-hive
+docker build -t cdh5-hive .
+```
+
+### Build the Kerberized Hadoop + Hive image
+First, autogenerate the Hadoop config files:
+```
+cd ../kerberos
+scripts/generate_configs.sh
+```
+
+Then build the image:
+```
+docker build -t cdh5-hive-kerberos .
+```
+
+## Run the Hive image interactively
+```
+docker run -it cdh5-hive:latest /etc/hive-bootstrap.sh -bash
+```
+
+## Run the Kerberized Hive image in DC/OS
+First, deploy a KDC via the dcos-commons `kdc.py` utility. See [the kdc README](https://github.com/mesosphere/dcos-commons/tree/master/tools/kdc) for details.
+
+From the dcos-commons repo:
+```
+PYTHONPATH=testing ./tools/kdc/kdc.py deploy principals.txt
+```
+
+At a minimum, `principals.txt` should include the following principals, where `<agent-hostname>` is the hostname of the agent that will run the Hadoop container (any private agent in the cluster will do):
+
+```
+hdfs/<agent-hostname>@LOCAL
+HTTP/<agent-hostname>@LOCAL
+yarn/<agent-hostname>@LOCAL
+hive/<agent-hostname>@LOCAL
+```
+
+Then deploy the Kerberized Hadoop / Hive container via Marathon, after updating the `constraints` field in the Marathon config with the agent hostname selected above:
+
+```
+dcos marathon app add kerberos/marathon/hdfs-hive-kerberos.json
+```
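As a convenience, the principals file can be generated for the chosen agent before running the `kdc.py` command above. A minimal sketch, assuming `principals.txt` is a plain list of principals (one per line) and reusing the example agent IP pinned in `kerberos/marathon/hdfs-hive-kerberos.json`:

```
# Sketch only: build principals.txt for one agent, then deploy the KDC.
AGENT_HOST=10.0.0.114   # example value; use any private agent in your cluster
cat > principals.txt <<EOF
hdfs/${AGENT_HOST}@LOCAL
HTTP/${AGENT_HOST}@LOCAL
yarn/${AGENT_HOST}@LOCAL
hive/${AGENT_HOST}@LOCAL
EOF
PYTHONPATH=testing ./tools/kdc/kdc.py deploy principals.txt   # run from the dcos-commons repo root
```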
diff --git a/tools/hive/hadoop-hive/Dockerfile b/tools/hive/hadoop-hive/Dockerfile
new file mode 100644
index 00000000..e19c4903
--- /dev/null
+++ b/tools/hive/hadoop-hive/Dockerfile
@@ -0,0 +1,143 @@
+FROM ubuntu:16.04
+
+USER root
+
+# install dev tools
+RUN apt-get update && \
+    apt-get install -y curl wget tar openssh-server openssh-client rsync python-software-properties apt-file apache2 && \
+    rm -rf /var/lib/apt/lists/*
+
+# required for running sshd in ubuntu:
+# https://github.com/docker/docker/issues/5704
+RUN mkdir /var/run/sshd
+RUN echo 'root:secretpasswd' | chpasswd
+RUN sed -i 's/PermitRootLogin without-password/PermitRootLogin yes/' /etc/ssh/sshd_config
+
+# passwordless ssh
+RUN yes | ssh-keygen -q -N "" -t dsa -f /etc/ssh/ssh_host_dsa_key
+RUN yes | ssh-keygen -q -N "" -t rsa -f /etc/ssh/ssh_host_rsa_key
+RUN yes | ssh-keygen -q -N "" -t rsa -f /root/.ssh/id_rsa
+RUN cp /root/.ssh/id_rsa.pub /root/.ssh/authorized_keys
+
+# fix the 254 error code
+RUN sed -i "/^[^#]*UsePAM/ s/.*/#&/" /etc/ssh/sshd_config
+RUN echo "UsePAM no" >> /etc/ssh/sshd_config
+RUN echo "Port 2122" >> /etc/ssh/sshd_config
+RUN /usr/sbin/sshd
+
+# ssh client config
+ADD conf/ssh_config /root/.ssh/config
+RUN chmod 600 /root/.ssh/config
+RUN chown root:root /root/.ssh/config
+
+EXPOSE 22
+
+# oracle jdk 8
+RUN apt-get update && \
+    apt-get install -y software-properties-common && \
+    add-apt-repository ppa:webupd8team/java && \
+    apt-get update && \
+    # to accept the license agreement automatically
+    echo debconf shared/accepted-oracle-license-v1-1 select true | debconf-set-selections && \
+    echo debconf shared/accepted-oracle-license-v1-1 seen true | debconf-set-selections && \
+    apt-get install -y oracle-java8-installer && \
+    rm -rf /var/lib/apt/lists/*
+
+# java env setup
+ENV JAVA_HOME /usr/lib/jvm/java-8-oracle
+ENV PATH $PATH:$JAVA_HOME/bin
+
+ENV CDH_VERSION 5
+ENV CDH_EXACT_VERSION 5.11.0
+ENV HADOOP_VERSION 2.6.0
+
+# download cdh hadoop
+RUN curl -L http://archive.cloudera.com/cdh${CDH_VERSION}/cdh/${CDH_VERSION}/hadoop-${HADOOP_VERSION}-cdh${CDH_EXACT_VERSION}.tar.gz \
+    | tar -xzC /usr/local && \
+    cd /usr/local && \
+    ln -s ./hadoop-${HADOOP_VERSION}-cdh${CDH_EXACT_VERSION} hadoop
+
+# need to define JAVA_HOME inside hadoop-env.sh
+ENV HADOOP_PREFIX /usr/local/hadoop
+RUN sed -i '/^export JAVA_HOME/ s:.*:export JAVA_HOME=/usr/lib/jvm/java-8-oracle\n:' $HADOOP_PREFIX/etc/hadoop/hadoop-env.sh
+
+# pseudo-distributed configuration of hadoop
+ADD templates/core-site.xml.template $HADOOP_PREFIX/etc/hadoop/core-site.xml.template
+ADD templates/hdfs-site.xml.template $HADOOP_PREFIX/etc/hadoop/hdfs-site.xml.template
+ADD conf/mapred-site.xml $HADOOP_PREFIX/etc/hadoop/mapred-site.xml
+ADD templates/yarn-site.xml.template $HADOOP_PREFIX/etc/hadoop/yarn-site.xml.template
+
+# add and set permissions for bootstrap script
+ADD scripts/hadoop-bootstrap.sh /etc/hadoop-bootstrap.sh
+RUN chown root:root /etc/hadoop-bootstrap.sh
+RUN chmod 700 /etc/hadoop-bootstrap.sh
+
+RUN chmod +x /usr/local/hadoop/etc/hadoop/*-env.sh
+
+# add hadoop to path
+ENV HADOOP_HOME /usr/local/hadoop
+ENV PATH $PATH:$HADOOP_HOME:$HADOOP_HOME/bin
+
+# for exposed ports refer to
+# https://www.cloudera.com/documentation/enterprise/5-4-x/topics/cdh_ig_ports_cdh5.html
+EXPOSE 50010 50020 50070 50075 50090 8020 9000 10020 19888 8030 8031 8032 8033 8040 8042 8088
+
+# download cdh hive
+ENV HIVE_VERSION 1.1.0
+RUN curl -L http://archive.cloudera.com/cdh${CDH_VERSION}/cdh/${CDH_VERSION}/hive-${HIVE_VERSION}-cdh${CDH_EXACT_VERSION}.tar.gz \
+    | tar -xzC /usr/local && \
+    cd /usr/local && \
+    mv hive-${HIVE_VERSION}-cdh${CDH_EXACT_VERSION} hive
+
+# configure postgres as the hive metastore backend
+RUN sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -cs`-pgdg main" >> /etc/apt/sources.list.d/pgdg.list'
+RUN wget -q https://www.postgresql.org/media/keys/ACCC4CF8.asc -O - | apt-key add -
+RUN apt-get update -y && \
+    apt-get -yq install vim postgresql-9.5 libpostgresql-jdbc-java && \
+    rm -rf /var/lib/apt/lists/*
+
+# add hive to path
+ENV HIVE_HOME /usr/local/hive
+ENV PATH $PATH:$HIVE_HOME/bin
+
+# add postgresql jdbc jar to classpath
+RUN ln -s /usr/share/java/postgresql-jdbc4.jar $HIVE_HOME/lib/postgresql-jdbc4.jar
+
+ENV PGPASSWORD hive
+USER postgres
+
+# initialize hive metastore db:
+# create the metastore db and hive user, assign privileges, and load the schema
+RUN cd $HIVE_HOME/scripts/metastore/upgrade/postgres/ && \
+    /etc/init.d/postgresql start && \
+    psql --command "CREATE DATABASE metastore;" && \
+    psql --command "CREATE USER hive WITH PASSWORD 'hive';" && \
+    psql --command "ALTER USER hive WITH SUPERUSER;" && \
+    psql --command "GRANT ALL PRIVILEGES ON DATABASE metastore TO hive;" && \
+    psql -U hive -d metastore -h localhost -f hive-schema-${HIVE_VERSION}.postgres.sql
+
+# revert back to default user
+USER root
+
+# disable ssl in postgresql.conf
+ENV POSTGRES_VERSION 9.5
+ENV POSTGRESQL_CONFIG_FILE /var/lib/postgresql/${POSTGRES_VERSION}/main/postgresql.conf
+ENV POSTGRESQL_MAIN /var/lib/postgresql/9.5/main/
+ADD conf/postgresql.conf $POSTGRESQL_MAIN
+RUN chown postgres:postgres $POSTGRESQL_CONFIG_FILE
+RUN sed -i -e 's/peer/md5/g' /etc/postgresql/$POSTGRES_VERSION/main/pg_hba.conf
+
+# copy config, sql, data files to /opt/files
+ENV HIVE_CONF /usr/local/hive/conf
+RUN mkdir /opt/files
+ADD templates/hive-site.xml.template /opt/files/
+ADD templates/hive-site.xml.template $HIVE_CONF/hive-site.xml.template
+
+# set permissions for hive bootstrap file
+ADD scripts/hive-bootstrap.sh /etc/hive-bootstrap.sh
+RUN chown root:root /etc/hive-bootstrap.sh
+RUN chmod 700 /etc/hive-bootstrap.sh
+
+EXPOSE 10000 10001 10002 10003 9083 50111 5432
+
+# run bootstrap script
+CMD ["/etc/hive-bootstrap.sh", "-d"]
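A quick local smoke test of the image built from the Dockerfile above — a sketch; the container name `hive-test` and the fixed sleep are arbitrary choices, not part of this patch:

```
docker build -t cdh5-hive .
docker run -d --name hive-test cdh5-hive:latest /etc/hive-bootstrap.sh -d

# versions baked into the image (Hadoop 2.6.0-cdh5.11.0, Hive 1.1.0-cdh5.11.0)
docker exec hive-test hadoop version
docker exec hive-test hive --version

# give HDFS, the metastore, and HiveServer2 time to start, then poke HDFS
sleep 60
docker exec hive-test hdfs dfs -ls /

docker rm -f hive-test
```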
diff --git a/tools/hive/hadoop-hive/conf/mapred-site.xml b/tools/hive/hadoop-hive/conf/mapred-site.xml
new file mode 100644
index 00000000..dba582f1
--- /dev/null
+++ b/tools/hive/hadoop-hive/conf/mapred-site.xml
@@ -0,0 +1,6 @@
+<configuration>
+  <property>
+    <name>mapreduce.framework.name</name>
+    <value>yarn</value>
+  </property>
+</configuration>
diff --git a/tools/hive/hadoop-hive/conf/postgresql.conf b/tools/hive/hadoop-hive/conf/postgresql.conf
new file mode 100644
index 00000000..ba4c9bc3
--- /dev/null
+++ b/tools/hive/hadoop-hive/conf/postgresql.conf
@@ -0,0 +1,44 @@
+# -----------------------------
+# PostgreSQL configuration file
+# -----------------------------
+#
+
+#------------------------------------------------------------------------------
+# FILE LOCATIONS
+#------------------------------------------------------------------------------
+
+data_directory = '/var/lib/postgresql/9.5/main'        # use data in another directory
+hba_file = '/var/lib/postgresql/9.5/pg_hba.conf'       # host-based authentication file
+ident_file = '/var/lib/postgresql/9.5/pg_ident.conf'   # ident configuration file
+
+#------------------------------------------------------------------------------
+# CONNECTIONS AND AUTHENTICATION
+#------------------------------------------------------------------------------
+
+# - Connection Settings -
+listen_addresses = '*'                 # what IP address(es) to listen on
+port = 5432
+max_connections = 100
+
+# - Security and Authentication -
+ssl = off
+
+#------------------------------------------------------------------------------
+# RESOURCE USAGE (except WAL)
+#------------------------------------------------------------------------------
+
+# - Memory -
+shared_buffers = 128MB                 # min 128kB
+
+#------------------------------------------------------------------------------
+# ERROR REPORTING AND LOGGING
+#------------------------------------------------------------------------------
+
+# - Where to Log -
+log_destination = 'stderr'             # valid values are combinations of stderr, csvlog, syslog
+logging_collector = on                 # enable capturing of stderr and csvlog
+log_directory = 'pg_log'               # directory where log files are written
+log_filename = 'postgresql-%a.log'     # log file name pattern
+log_truncate_on_rotation = on
+log_rotation_age = 1d
+log_rotation_size = 0
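The Dockerfile above already creates the `metastore` database and the `hive` role at build time; with `pg_hba.conf` switched to md5 auth, the same connection settings it uses can confirm the metastore backend from inside a running container. A sketch:

```
# mirrors the psql invocation used in the Dockerfile's metastore bootstrap
PGPASSWORD=hive psql -U hive -d metastore -h localhost -c '\dt' | head
```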
diff --git a/tools/hive/hadoop-hive/conf/ssh_config b/tools/hive/hadoop-hive/conf/ssh_config
new file mode 100644
index 00000000..535f9d32
--- /dev/null
+++ b/tools/hive/hadoop-hive/conf/ssh_config
@@ -0,0 +1,5 @@
+Host *
+  UserKnownHostsFile /dev/null
+  StrictHostKeyChecking no
+  LogLevel quiet
+  Port 2122
diff --git a/tools/hive/hadoop-hive/scripts/hadoop-bootstrap.sh b/tools/hive/hadoop-hive/scripts/hadoop-bootstrap.sh
new file mode 100755
index 00000000..9f034511
--- /dev/null
+++ b/tools/hive/hadoop-hive/scripts/hadoop-bootstrap.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+set -x
+
+# sshd is used by Hadoop's start scripts
+/usr/sbin/sshd
+
+: ${HADOOP_PREFIX:=/usr/local/hadoop}
+
+$HADOOP_PREFIX/etc/hadoop/hadoop-env.sh
+
+rm /tmp/*.pid
+
+# templating of config files
+sed s/{{HOSTNAME}}/$HOSTNAME/ /usr/local/hadoop/etc/hadoop/core-site.xml.template > /usr/local/hadoop/etc/hadoop/core-site.xml
+sed s/{{HOSTNAME}}/$HOSTNAME/ /usr/local/hadoop/etc/hadoop/yarn-site.xml.template > /usr/local/hadoop/etc/hadoop/yarn-site.xml
+sed s/{{HOSTNAME}}/$HOSTNAME/ /usr/local/hadoop/etc/hadoop/hdfs-site.xml.template > /usr/local/hadoop/etc/hadoop/hdfs-site.xml
+
+# format namenode
+$HADOOP_PREFIX/bin/hdfs namenode -format
+
+# start hdfs and yarn
+$HADOOP_PREFIX/sbin/start-dfs.sh
+$HADOOP_PREFIX/sbin/start-yarn.sh
+
+if [[ $1 == "-bash" ]]; then
+  /bin/bash
+fi
diff --git a/tools/hive/hadoop-hive/scripts/hive-bootstrap.sh b/tools/hive/hadoop-hive/scripts/hive-bootstrap.sh
new file mode 100644
index 00000000..5f4f2d58
--- /dev/null
+++ b/tools/hive/hadoop-hive/scripts/hive-bootstrap.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+set -x
+
+# save all env vars to /root/.bashrc for ssh sessions
+printenv | cat >> /root/.bashrc
+
+# hadoop bootstrap
+/etc/hadoop-bootstrap.sh
+
+# restart postgresql
+/etc/init.d/postgresql restart
+
+# kinit for kerberos mode
+if command -v kinit 2>/dev/null; then
+  kinit -k -t /usr/local/hadoop/etc/hadoop/hdfs.keytab hdfs@LOCAL
+fi
+
+until hdfs dfs -ls /
+do
+  echo "waiting for hdfs to be ready"; sleep 10;
+done
+
+# create hdfs directories
+$HADOOP_PREFIX/bin/hdfs dfs -mkdir -p /user/root
+hdfs dfs -chown -R hdfs:supergroup /user
+
+$HADOOP_PREFIX/bin/hdfs dfs -mkdir -p /apps/hive/warehouse
+hdfs dfs -chown -R hive:supergroup /apps/hive
+hdfs dfs -chmod 777 /apps/hive/warehouse
+
+# altering the hive-site configuration
+sed s/{{HOSTNAME}}/$HOSTNAME/ /usr/local/hive/conf/hive-site.xml.template > /usr/local/hive/conf/hive-site.xml
+sed s/{{HOSTNAME}}/$HOSTNAME/ /opt/files/hive-site.xml.template > /opt/files/hive-site.xml
+
+# start hive metastore server
+$HIVE_HOME/bin/hive --service metastore &
+
+sleep 20
+
+# start hiveserver2
+$HIVE_HOME/bin/hive --service hiveserver2 &
+
+if [[ $1 == "-bash" ]]; then
+  /bin/bash
+elif [[ $1 == "-d" ]]; then
+  while true; do sleep 10000; done
+else
+  echo "Unknown argument: $1"
+  echo "Usage: ./hive-bootstrap.sh [ -bash | -d ]"
+fi
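Once `hive-bootstrap.sh` has brought up the metastore (port 9083) and HiveServer2 (port 10000), Beeline makes a convenient end-to-end check from inside the container. A sketch; the Kerberized variant assumes a ticket from the keytab and uses the `hive/<host>@LOCAL` principal defined in the Kerberos templates later in this patch:

```
# non-Kerberized image
beeline -u "jdbc:hive2://localhost:10000/default" -e "SHOW DATABASES;"

# Kerberized image: get a ticket, then name the HiveServer2 principal in the JDBC URL
kinit -k -t /usr/local/hadoop/etc/hadoop/hdfs.keytab hdfs@LOCAL
beeline -u "jdbc:hive2://${HOSTNAME}:10000/default;principal=hive/${HOSTNAME}@LOCAL" -e "SHOW DATABASES;"
```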
diff --git a/tools/hive/hadoop-hive/templates/core-site.xml.template b/tools/hive/hadoop-hive/templates/core-site.xml.template
new file mode 100644
index 00000000..11dc4a93
--- /dev/null
+++ b/tools/hive/hadoop-hive/templates/core-site.xml.template
@@ -0,0 +1,6 @@
+<configuration>
+  <property>
+    <name>fs.defaultFS</name>
+    <value>hdfs://{{HOSTNAME}}:9000</value>
+  </property>
+</configuration>
diff --git a/tools/hive/hadoop-hive/templates/hdfs-site.xml.template b/tools/hive/hadoop-hive/templates/hdfs-site.xml.template
new file mode 100644
index 00000000..82c525ea
--- /dev/null
+++ b/tools/hive/hadoop-hive/templates/hdfs-site.xml.template
@@ -0,0 +1,6 @@
+<configuration>
+  <property>
+    <name>dfs.replication</name>
+    <value>1</value>
+  </property>
+</configuration>
diff --git a/tools/hive/hadoop-hive/templates/hive-site.xml.template b/tools/hive/hadoop-hive/templates/hive-site.xml.template
new file mode 100755
index 00000000..8e61b910
--- /dev/null
+++ b/tools/hive/hadoop-hive/templates/hive-site.xml.template
@@ -0,0 +1,23 @@
+<configuration>
+  <property>
+    <name>javax.jdo.option.ConnectionDriverName</name>
+    <value>org.postgresql.Driver</value>
+  </property>
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:postgresql://localhost/metastore</value>
+  </property>
+  <property>
+    <name>javax.jdo.option.ConnectionUserName</name>
+    <value>hive</value>
+  </property>
+  <property>
+    <name>javax.jdo.option.ConnectionPassword</name>
+    <value>hive</value>
+  </property>
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>/apps/hive/warehouse</value>
+  </property>
+</configuration>
diff --git a/tools/hive/hadoop-hive/templates/yarn-site.xml.template b/tools/hive/hadoop-hive/templates/yarn-site.xml.template
new file mode 100644
index 00000000..83138436
--- /dev/null
+++ b/tools/hive/hadoop-hive/templates/yarn-site.xml.template
@@ -0,0 +1,2 @@
+<configuration>
+</configuration>
diff --git a/tools/hive/kerberos/Dockerfile b/tools/hive/kerberos/Dockerfile
new file mode 100644
index 00000000..19559f65
--- /dev/null
+++ b/tools/hive/kerberos/Dockerfile
@@ -0,0 +1,21 @@
+FROM cdh5-hive
+
+# copy kerberized hadoop config files
+ADD templates/core-site.xml.template $HADOOP_PREFIX/etc/hadoop/core-site.xml.template
+ADD templates/hdfs-site.xml.template $HADOOP_PREFIX/etc/hadoop/hdfs-site.xml.template
+ADD templates/yarn-site.xml.template $HADOOP_PREFIX/etc/hadoop/yarn-site.xml.template
+
+# copy kerberized hive config file
+ADD templates/hive-site.xml.template /opt/files/
+ADD templates/hive-site.xml.template $HIVE_CONF/hive-site.xml.template
+
+# krb5.conf
+ADD conf/krb5.conf /etc/
+
+# install kinit, used in bootstrap script
+RUN apt-get update && \
+    apt-get install -y krb5-user && \
+    rm -rf /var/lib/apt/lists/*
+
+# run bootstrap script which starts hadoop and hive servers
+CMD ["/etc/hive-bootstrap.sh", "-d"]
diff --git a/tools/hive/kerberos/conf/krb5.conf b/tools/hive/kerberos/conf/krb5.conf
new file mode 100644
index 00000000..08416abb
--- /dev/null
+++ b/tools/hive/kerberos/conf/krb5.conf
@@ -0,0 +1,10 @@
+[libdefaults]
+default_realm = LOCAL
+dns_lookup_realm = true
+dns_lookup_kdc = true
+udp_preference_limit = 1
+
+[realms]
+LOCAL = {
+  kdc = kdc.marathon.mesos:2500
+}
diff --git a/tools/hive/kerberos/marathon/hdfs-hive-kerberos.json b/tools/hive/kerberos/marathon/hdfs-hive-kerberos.json
new file mode 100644
index 00000000..8cfa91ac
--- /dev/null
+++ b/tools/hive/kerberos/marathon/hdfs-hive-kerberos.json
@@ -0,0 +1,39 @@
+{
+  "id": "/cdh5-hadoop-hive-kerberos",
+  "instances": 1,
+  "cpus": 1,
+  "mem": 4096,
+  "user": "root",
+  "container": {
+    "type": "MESOS",
+    "docker": {
+      "image": "susanxhuynh/cdh5-hive-kerberos:latest",
+      "forcePullImage": true
+    },
+    "volumes": [
+      {
+        "containerPath": "/usr/local/hadoop/etc/hadoop/hdfs.keytab",
+        "secret": "keytab",
+        "hostPath": ""
+      }
+    ]
+  },
+  "secrets": {
+    "keytab": {
+      "source": "__dcos_base64___keytab"
+    }
+  },
+  "networks": [
+    {
+      "mode": "host"
+    }
+  ],
+  "requirePorts": false,
+  "constraints": [
+    [
+      "hostname",
+      "IS",
+      "10.0.0.114"
+    ]
+  ]
+}
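The `constraints` entry above pins the task to a single agent. Rather than editing the JSON by hand, the hostname can be substituted at deploy time — a sketch that assumes `jq` is available locally:

```
AGENT_HOST=10.0.0.114   # the private agent chosen for the Hadoop/Hive container
jq --arg host "$AGENT_HOST" '.constraints = [["hostname", "IS", $host]]' \
  kerberos/marathon/hdfs-hive-kerberos.json > /tmp/hdfs-hive-kerberos.json
dcos marathon app add /tmp/hdfs-hive-kerberos.json
```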
diff --git a/tools/hive/kerberos/scripts/generate_configs.sh b/tools/hive/kerberos/scripts/generate_configs.sh
new file mode 100755
index 00000000..c20629b0
--- /dev/null
+++ b/tools/hive/kerberos/scripts/generate_configs.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+# Generate kerberos versions of the Hadoop config files by taking the unkerberized versions
+# and tacking on the associated kerberos properties.
+# Assumption is that these two sets of properties do not overlap.
+
+cd "$( dirname "${BASH_SOURCE[0]}" )"
+for FILE_BASE in core-site hdfs-site hive-site yarn-site; do
+    COMBINED_FILE="../templates/${FILE_BASE}.xml.template"
+    echo "Generating config file: kerberos/templates/${FILE_BASE}.xml.template"
+    echo '<configuration>' > $COMBINED_FILE
+    grep -vh '</\?configuration>' "../../hadoop-hive/templates/${FILE_BASE}.xml.template" >> $COMBINED_FILE
+    echo "" >> $COMBINED_FILE
+    grep -vh '</\?configuration>' ../templates/${FILE_BASE}-kerberos.xml.template >> $COMBINED_FILE
+    echo '</configuration>' >> $COMBINED_FILE
+done
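Since `generate_configs.sh` simply concatenates each base template with its `*-kerberos` counterpart, a cheap sanity check is to regenerate and validate the merged templates. A sketch; `xmllint` (libxml2) is assumed to be installed locally and is not part of these images:

```
cd tools/hive/kerberos
scripts/generate_configs.sh
for f in templates/core-site.xml.template templates/hdfs-site.xml.template \
         templates/hive-site.xml.template templates/yarn-site.xml.template; do
  xmllint --noout "$f" && echo "OK: $f"
done
```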
diff --git a/tools/hive/kerberos/templates/core-site-kerberos.xml.template b/tools/hive/kerberos/templates/core-site-kerberos.xml.template
new file mode 100644
index 00000000..1f554fee
--- /dev/null
+++ b/tools/hive/kerberos/templates/core-site-kerberos.xml.template
@@ -0,0 +1,23 @@
+<configuration>
+  <property>
+    <name>hadoop.security.authentication</name>
+    <value>kerberos</value>
+  </property>
+
+  <property>
+    <name>hadoop.security.authorization</name>
+    <value>true</value>
+  </property>
+
+  <!-- allow the hive service user to impersonate other users -->
+  <property>
+    <name>hadoop.proxyuser.hive.hosts</name>
+    <value>*</value>
+  </property>
+
+  <property>
+    <name>hadoop.proxyuser.hive.groups</name>
+    <value>*</value>
+  </property>
+</configuration>
diff --git a/tools/hive/kerberos/templates/hdfs-site-kerberos.xml.template b/tools/hive/kerberos/templates/hdfs-site-kerberos.xml.template
new file mode 100644
index 00000000..06e19189
--- /dev/null
+++ b/tools/hive/kerberos/templates/hdfs-site-kerberos.xml.template
@@ -0,0 +1,53 @@
+<configuration>
+  <property>
+    <name>dfs.block.access.token.enable</name>
+    <value>true</value>
+  </property>
+
+  <!-- NameNode security settings -->
+  <property>
+    <name>dfs.namenode.keytab.file</name>
+    <value>/usr/local/hadoop/etc/hadoop/hdfs.keytab</value>
+  </property>
+  <property>
+    <name>dfs.namenode.kerberos.principal</name>
+    <value>hdfs/{{HOSTNAME}}@LOCAL</value>
+  </property>
+
+  <!-- Secondary NameNode security settings -->
+  <property>
+    <name>dfs.secondary.namenode.keytab.file</name>
+    <value>/usr/local/hadoop/etc/hadoop/hdfs.keytab</value>
+  </property>
+  <property>
+    <name>dfs.secondary.namenode.kerberos.principal</name>
+    <value>hdfs/{{HOSTNAME}}@LOCAL</value>
+  </property>
+
+  <!-- DataNode security settings -->
+  <property>
+    <name>dfs.datanode.keytab.file</name>
+    <value>/usr/local/hadoop/etc/hadoop/hdfs.keytab</value>
+  </property>
+  <property>
+    <name>dfs.datanode.kerberos.principal</name>
+    <value>hdfs/{{HOSTNAME}}@LOCAL</value>
+  </property>
+
+  <!-- WebHDFS security settings -->
+  <property>
+    <name>dfs.web.authentication.kerberos.principal</name>
+    <value>HTTP/{{HOSTNAME}}@LOCAL</value>
+  </property>
+  <property>
+    <name>dfs.web.authentication.kerberos.keytab</name>
+    <value>/usr/local/hadoop/etc/hadoop/hdfs.keytab</value>
+  </property>
+
+  <!-- allow DataNodes to run on non-privileged ports (testing only) -->
+  <property>
+    <name>ignore.secure.ports.for.testing</name>
+    <value>true</value>
+  </property>
+</configuration>
diff --git a/tools/hive/kerberos/templates/hive-site-kerberos.xml.template b/tools/hive/kerberos/templates/hive-site-kerberos.xml.template
new file mode 100755
index 00000000..5dad8f1f
--- /dev/null
+++ b/tools/hive/kerberos/templates/hive-site-kerberos.xml.template
@@ -0,0 +1,48 @@
+<configuration>
+  <property>
+    <name>hive.server2.authentication</name>
+    <value>KERBEROS</value>
+  </property>
+
+  <property>
+    <name>hive.server2.authentication.kerberos.principal</name>
+    <value>hive/{{HOSTNAME}}@LOCAL</value>
+  </property>
+
+  <property>
+    <name>hive.server2.authentication.kerberos.keytab</name>
+    <value>/usr/local/hadoop/etc/hadoop/hdfs.keytab</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.sasl.enabled</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.kerberos.keytab.file</name>
+    <value>/usr/local/hadoop/etc/hadoop/hdfs.keytab</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.kerberos.principal</name>
+    <value>hive/{{HOSTNAME}}@LOCAL</value>
+  </property>
+
+  <property>
+    <name>hive.security.authorization.createtable.owner.grants</name>
+    <value>ALL</value>
+    <description>The set of privileges automatically granted to the owner whenever a table gets created.</description>
+  </property>
+
+  <property>
+    <name>hive.users.in.admin.role</name>
+    <value>hdfs,hive</value>
+  </property>
+
+  <property>
+    <name>hive.security.authorization.manager</name>
+    <value>org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider</value>
+  </property>
+</configuration>
diff --git a/tools/hive/kerberos/templates/yarn-site-kerberos.xml.template b/tools/hive/kerberos/templates/yarn-site-kerberos.xml.template
new file mode 100644
index 00000000..fe5bd154
--- /dev/null
+++ b/tools/hive/kerberos/templates/yarn-site-kerberos.xml.template
@@ -0,0 +1,23 @@
+<configuration>
+  <!-- ResourceManager security settings -->
+  <property>
+    <name>yarn.resourcemanager.keytab</name>
+    <value>/usr/local/hadoop/etc/hadoop/hdfs.keytab</value>
+  </property>
+  <property>
+    <name>yarn.resourcemanager.principal</name>
+    <value>yarn/{{HOSTNAME}}@LOCAL</value>
+  </property>
+
+  <!-- NodeManager security settings -->
+  <property>
+    <name>yarn.nodemanager.keytab</name>
+    <value>/usr/local/hadoop/etc/hadoop/hdfs.keytab</value>
+  </property>
+  <property>
+    <name>yarn.nodemanager.principal</name>
+    <value>yarn/{{HOSTNAME}}@LOCAL</value>
+  </property>
+</configuration>
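After `dcos marathon app add`, one way to confirm the Kerberized container is healthy is to exec into the task and exercise the keytab. A sketch; the task-name filter, `jq`, and `dcos task exec` availability are assumptions about the local CLI setup:

```
TASK_ID=$(dcos task --json | jq -r '.[] | select(.name == "cdh5-hadoop-hive-kerberos") | .id' | head -n 1)
dcos task exec "$TASK_ID" kinit -k -t /usr/local/hadoop/etc/hadoop/hdfs.keytab hdfs@LOCAL
dcos task exec "$TASK_ID" /usr/local/hadoop/bin/hdfs dfs -ls /
```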