Commit

check isDirectory in home dir filter
bowenliang123 committed Sep 21, 2023
1 parent f1cb15e commit 0329741
Showing 2 changed files with 9 additions and 8 deletions.

ProcBuilder.scala

@@ -17,7 +17,7 @@
 
 package org.apache.kyuubi.engine
 
-import java.io.{File, FilenameFilter, IOException}
+import java.io.{File, FileFilter, IOException}
 import java.net.URI
 import java.nio.charset.StandardCharsets
 import java.nio.file.{Files, Path, Paths}
@@ -296,8 +296,10 @@ trait ProcBuilder {
     }
   }
 
-  protected lazy val engineHomeDirFilter: FilenameFilter = (_: File, name: String) =>
-    name.contains(s"$shortName-") && !name.contains("-engine")
+  protected lazy val engineHomeDirFilter: FileFilter = (f: File) => {
+    val fileName = f.getName
+    f.isDirectory && fileName.contains(s"$shortName-") && !fileName.contains("-engine")
+  }
 
   /**
    * Get the home directly that contains binary distributions of engines.
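
The behavioral change here is the switch from java.io.FilenameFilter to java.io.FileFilter: File.listFiles(FilenameFilter) only exposes entry names, while File.listFiles(FileFilter) passes the File itself, so the filter can also require isDirectory. Below is a minimal, hypothetical sketch (not Kyuubi code; the parent path and short name are invented) of how such a filter behaves:

import java.io.{File, FileFilter}

// Minimal sketch, not Kyuubi code: shows why FileFilter is used here.
// listFiles(FileFilter) hands the filter a File, so isDirectory can be checked;
// the listFiles(FilenameFilter) overload only exposes the entry name.
// The parent path "/opt" and shortName "spark" are hypothetical.
object EngineHomeDirFilterDemo {
  def main(args: Array[String]): Unit = {
    val shortName = "spark" // hypothetical engine short name
    val filter: FileFilter = (f: File) => {
      val fileName = f.getName
      f.isDirectory && fileName.contains(s"$shortName-") && !fileName.contains("-engine")
    }
    // A stray archive such as spark-3.5.0-bin-hadoop3.tgz still contains "spark-",
    // but is rejected because it is not a directory.
    val entries = Option(new File("/opt").listFiles(filter)).getOrElse(Array.empty[File])
    entries.foreach(dir => println(dir.getName))
  }
}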

SparkProcessBuilder.scala

@@ -17,7 +17,7 @@
 
 package org.apache.kyuubi.engine.spark
 
-import java.io.{File, FilenameFilter, IOException}
+import java.io.{File, FileFilter, IOException}
 import java.nio.file.Paths
 import java.util.Locale
 
@@ -27,7 +27,6 @@ import scala.collection.mutable.ArrayBuffer
 import com.google.common.annotations.VisibleForTesting
 import org.apache.commons.lang3.StringUtils
 import org.apache.hadoop.security.UserGroupInformation
-import org.apache.hadoop.shaded.org.apache.commons.io.filefilter.RegexFileFilter
 
 import org.apache.kyuubi._
 import org.apache.kyuubi.config.KyuubiConf
@@ -105,21 +104,21 @@ class SparkProcessBuilder(
 
   private lazy val sparkCoreScalaVersion: String = {
     Paths.get(sparkHome, "jars").toFile
-      .list(new RegexFileFilter("^spark-core_.*\\.jar$"))
+      .list((_, name) => name.matches("^spark-core_.*\\.jar$"))
       .map { p => p.substring(p.indexOf("_") + 1, p.lastIndexOf("-")) }
       .head
   }
 
   override protected def engineScalaBinaryVersion: String =
     StringUtils.defaultIfBlank(System.getenv("SPARK_SCALA_VERSION"), sparkCoreScalaVersion)
 
-  override protected lazy val engineHomeDirFilter: FilenameFilter = {
+  override protected lazy val engineHomeDirFilter: FileFilter = (file: File) => {
     val pattern = if (SemanticVersion(SCALA_COMPILE_VERSION) >= "2.13") {
       "^spark-\\d+\\.\\d+\\.\\d+-bin-hadoop\\d(\\.\\d+)?+-scala\\d+(\\.\\d+)?$"
     } else {
       "^spark-\\d+\\.\\d+\\.\\d+-bin-hadoop\\d+(\\.\\d+)?$"
     }
-    new RegexFileFilter(pattern)
+    file.isDirectory && file.getName.matches(pattern)
   }
 
   override protected lazy val commands: Array[String] = {
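
With the filters now built from plain lambdas and String.matches, the Hadoop-shaded RegexFileFilter import is no longer needed: the jar lookup uses the File.list(FilenameFilter) overload with a lambda, and the home-dir filter matches the directory name against a regex and additionally requires isDirectory. A rough, self-contained illustration (the sample entry names are invented) of how the two patterns from the diff classify typical entries:

// Minimal sketch, illustration only: how the two patterns from the diff classify
// a few invented entry names. In the real builder, file.isDirectory additionally
// rejects any non-directory entry whose name happens to match.
object SparkHomePatternDemo {
  def main(args: Array[String]): Unit = {
    val scala213Pattern = "^spark-\\d+\\.\\d+\\.\\d+-bin-hadoop\\d(\\.\\d+)?+-scala\\d+(\\.\\d+)?$"
    val defaultPattern = "^spark-\\d+\\.\\d+\\.\\d+-bin-hadoop\\d+(\\.\\d+)?$"
    val samples = Seq(
      "spark-3.5.0-bin-hadoop3",           // matches the default pattern only
      "spark-3.5.0-bin-hadoop3-scala2.13", // matches the Scala 2.13 pattern only
      "spark-3.5.0-bin-hadoop3.tgz")       // matches neither pattern
    samples.foreach { name =>
      println(s"$name -> default=${name.matches(defaultPattern)}, scala213=${name.matches(scala213Pattern)}")
    }
  }
}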
