diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 7b7d9dd72a34..7c75eea99012 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -304,8 +304,8 @@ private[spark] class SparkSubmit extends Logging {
     // Resolve maven dependencies if there are any and add classpath to jars. Add them to py-files
     // too for packages that include Python code
     val resolvedMavenCoordinates = DependencyUtils.resolveMavenDependencies(
-      packagesTransitive = true, args.packagesExclusions, args.packages,
-      args.repositories, args.ivyRepoPath, args.ivySettingsPath)
+      packagesTransitive = true, Option(args.packagesExclusions), Option(args.packages),
+      Option(args.repositories), Option(args.ivyRepoPath), args.ivySettingsPath)
 
     if (resolvedMavenCoordinates.nonEmpty) {
       // In K8s client mode, when in the driver, add resolved jars early as we might need
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala b/core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala
index 7cf961f42112..7fa2be0bb523 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala
@@ -81,7 +81,7 @@ object DriverWrapper extends Logging {
     val resolvedMavenCoordinates = DependencyUtils.resolveMavenDependencies(true,
       ivyProperties.packagesExclusions, ivyProperties.packages, ivyProperties.repositories,
-      ivyProperties.ivyRepoPath, Option(ivyProperties.ivySettingsPath))
+      ivyProperties.ivyRepoPath, ivyProperties.ivySettingsPath)
     val jars = {
       val jarsProp = sys.props.get(config.JARS.key).orNull
       if (resolvedMavenCoordinates.nonEmpty) {
diff --git a/core/src/main/scala/org/apache/spark/util/DependencyUtils.scala b/core/src/main/scala/org/apache/spark/util/DependencyUtils.scala
index 0d78af2dafc9..a029342e1104 100644
--- a/core/src/main/scala/org/apache/spark/util/DependencyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/util/DependencyUtils.scala
@@ -29,11 +29,11 @@ import org.apache.spark.deploy.SparkSubmitUtils
 import org.apache.spark.internal.Logging
 
 case class IvyProperties(
-    packagesExclusions: String,
-    packages: String,
-    repositories: String,
-    ivyRepoPath: String,
-    ivySettingsPath: String)
+    packagesExclusions: Option[String],
+    packages: Option[String],
+    repositories: Option[String],
+    ivyRepoPath: Option[String],
+    ivySettingsPath: Option[String])
 
 private[spark] object DependencyUtils extends Logging {
 
@@ -44,7 +44,7 @@ private[spark] object DependencyUtils extends Logging {
       "spark.jars.repositories",
       "spark.jars.ivy",
       "spark.jars.ivySettings"
-    ).map(sys.props.get(_).orNull)
+    ).map(sys.props.get(_))
     IvyProperties(packagesExclusions, packages, repositories, ivyRepoPath, ivySettingsPath)
   }
 
@@ -69,10 +69,10 @@ private[spark] object DependencyUtils extends Logging {
    * Example: Input:  exclude=org.mortbay.jetty:jetty,org.eclipse.jetty:jetty-http
    *          Output: [org.mortbay.jetty:jetty,org.eclipse.jetty:jetty-http]
    */
-  private def parseQueryParams(uri: URI): (Boolean, String) = {
+  private def parseQueryParams(uri: URI): (Boolean, Option[String]) = {
     val uriQuery = uri.getQuery
     if (uriQuery == null) {
-      (false, "")
+      (false, None)
     } else {
       val mapTokens = uriQuery.split("&").map(_.split("="))
       if (mapTokens.exists(isInvalidQueryString)) {
@@ -103,7 +103,7 @@ private[spark] object DependencyUtils extends Logging {
           }
           excludes
         }.mkString(",")
-      }.getOrElse("")
+      }
 
     val validParams = Set("transitive", "exclude")
     val invalidParams = groupedParams.keys.filterNot(validParams.contains).toSeq
@@ -150,36 +150,32 @@ private[spark] object DependencyUtils extends Logging {
     resolveMavenDependencies(
       transitive,
       exclusionList,
-      authority,
+      Some(authority),
       ivyProperties.repositories,
       ivyProperties.ivyRepoPath,
-      Option(ivyProperties.ivySettingsPath)
+      ivyProperties.ivySettingsPath
     )
   }
 
   def resolveMavenDependencies(
       packagesTransitive: Boolean,
-      packagesExclusions: String,
-      packages: String,
-      repositories: String,
-      ivyRepoPath: String,
+      packagesExclusions: Option[String],
+      packages: Option[String],
+      repositories: Option[String],
+      ivyRepoPath: Option[String],
       ivySettingsPath: Option[String]): Seq[String] = {
     val exclusions: Seq[String] =
-      if (!StringUtils.isBlank(packagesExclusions)) {
-        packagesExclusions.split(",")
-      } else {
-        Nil
-      }
+      packagesExclusions.map(_.split(",").toSeq).getOrElse(Nil)
 
     // Create the IvySettings, either load from file or build defaults
     val ivySettings = ivySettingsPath match {
       case Some(path) =>
-        SparkSubmitUtils.loadIvySettings(path, Option(repositories), Option(ivyRepoPath))
+        SparkSubmitUtils.loadIvySettings(path, repositories, ivyRepoPath)
       case None =>
-        SparkSubmitUtils.buildIvySettings(Option(repositories), Option(ivyRepoPath))
+        SparkSubmitUtils.buildIvySettings(repositories, ivyRepoPath)
     }
 
-    SparkSubmitUtils.resolveMavenCoordinates(packages, ivySettings,
+    SparkSubmitUtils.resolveMavenCoordinates(packages.orNull, ivySettings,
       transitive = packagesTransitive, exclusions = exclusions)
   }
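
Note for reviewers: the net effect of the patch is that optional Ivy settings flow through as Option[String] end to end, instead of being flattened to null (via .orNull) or "" and re-wrapped with Option(...) at each call site. Below is a minimal standalone sketch of that pattern, not Spark code; the property keys, class, and method names are illustrative only.

// Standalone sketch: carry absent settings as Option[String] end to end,
// rather than materializing null and re-wrapping at every boundary.
object OptionPlumbingSketch {

  // Stand-in for IvyProperties: every field may legitimately be absent.
  case class Props(packages: Option[String], exclusions: Option[String])

  def load(): Props = {
    // sys.props.get already returns Option[String]; no .orNull needed.
    val Seq(packages, exclusions) =
      Seq("demo.jars.packages", "demo.jars.excludes").map(sys.props.get(_))
    Props(packages, exclusions)
  }

  // Consumers map over the Option instead of doing null / isBlank checks.
  def exclusionList(props: Props): Seq[String] =
    props.exclusions.map(_.split(",").toSeq).getOrElse(Nil)

  def main(args: Array[String]): Unit = {
    sys.props("demo.jars.excludes") = "org.mortbay.jetty:jetty,org.eclipse.jetty:jetty-http"
    // Prints: List(org.mortbay.jetty:jetty, org.eclipse.jetty:jetty-http)
    println(exclusionList(load()))
    // Only a null-based downstream API forces a drop back to null,
    // mirroring packages.orNull at the resolveMavenCoordinates boundary.
    println(load().packages.orNull) // Prints: null
  }
}

The one remaining .orNull in the patch, at the SparkSubmitUtils.resolveMavenCoordinates call, exists because that API still accepts a nullable String; the Option is unwrapped only at that boundary rather than throughout the call chain.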