Skip to content

Commit

Permalink
Kyligence#99 add config to allow the user's Hadoop classpath to take precedence
Browse files Browse the repository at this point in the history
Kyligence#100 fix job submission error when the current user does not have admin ACL
  • Loading branch information
hn5092 committed Mar 4, 2020
1 parent 9e1aaa1 commit ff459da
Show file tree
Hide file tree
Showing 2 changed files with 28 additions and 9 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -500,6 +500,16 @@ private[spark] class ApplicationMaster(args: ApplicationMasterArguments) extends
}

private def runExecutorLauncher(): Unit = {
val classLoader = Thread.currentThread.getContextClassLoader
val configs = Array("core-site.xml", "hdfs-site.xml",
"hive-site.xml", "mapred-site.xml", "yarn-site.xml")
configs.foreach{ fileName =>
val url = classLoader.getResource(fileName)
if (url != null) {
val path = url.getPath
logInfo(path + " is used as " + fileName)
} else logInfo(fileName + " does not exist in the resources")
}
val hostname = Utils.localHostName
val amCores = sparkConf.get(AM_CORES)
rpcEnv = RpcEnv.create("sparkYarnAM", hostname, hostname, -1, sparkConf, securityMgr,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -159,9 +159,13 @@ private[spark] class Client(
launcherBackend.connect()
yarnClient.init(hadoopConf)
yarnClient.start()

logInfo("Requesting a new application from cluster with %d NodeManagers"
.format(yarnClient.getYarnClusterMetrics.getNumNodeManagers))
try {
logInfo("Requesting a new application from cluster with %d NodeManagers"
.format(yarnClient.getYarnClusterMetrics.getNumNodeManagers))
} catch {
case th: Throwable =>
logWarning("Get cluster metrics error", th)
}

// Get a new application from our RM
val newApp = yarnClient.createApplication()
Expand Down Expand Up @@ -1269,6 +1273,10 @@ private object Client extends Logging {
sparkConf: SparkConf,
env: HashMap[String, String],
extraClassPath: Option[String] = None): Unit = {
if (sparkConf.getBoolean("spark.yarn.user.hadoopConfClasspath.first", defaultValue = false)) {
addClasspathEntry(
buildPath(Environment.PWD.$$(), LOCALIZED_CONF_DIR, LOCALIZED_HADOOP_CONF_DIR), env)
}
extraClassPath.foreach { cp =>
addClasspathEntry(getClusterPath(sparkConf, cp), env)
}
Expand Down Expand Up @@ -1315,12 +1323,13 @@ private object Client extends Logging {
sys.env.get(ENV_DIST_CLASSPATH).foreach { cp =>
addClasspathEntry(getClusterPath(sparkConf, cp), env)
}

// Add the localized Hadoop config at the end of the classpath, in case it contains other
// files (such as configuration files for different services) that are not part of the
// YARN cluster's config.
addClasspathEntry(
buildPath(Environment.PWD.$$(), LOCALIZED_CONF_DIR, LOCALIZED_HADOOP_CONF_DIR), env)
if (!sparkConf.getBoolean("spark.yarn.user.hadoopConfClasspath.first", defaultValue = false)) {
// Add the localized Hadoop config at the end of the classpath, in case it contains other
// files (such as configuration files for different services) that are not part of the
// YARN cluster's config.
addClasspathEntry(
buildPath(Environment.PWD.$$(), LOCALIZED_CONF_DIR, LOCALIZED_HADOOP_CONF_DIR), env)
}
}

/**
Expand Down

0 comments on commit ff459da

Please sign in to comment.