diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/backend/cloud/KubeConfigBackend.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/backend/cloud/KubeConfigBackend.scala
index aa34cc617d6f0..e28ea637e6dbb 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/backend/cloud/KubeConfigBackend.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/backend/cloud/KubeConfigBackend.scala
@@ -18,7 +18,7 @@ package org.apache.spark.deploy.k8s.integrationtest.backend.cloud
 
 import io.fabric8.kubernetes.client.{Config, KubernetesClient, KubernetesClientBuilder}
 import io.fabric8.kubernetes.client.utils.Utils
-import org.apache.commons.lang3.StringUtils
+import org.apache.commons.lang3.{Strings, StringUtils}
 
 import org.apache.spark.deploy.k8s.integrationtest.TestConstants
 import org.apache.spark.deploy.k8s.integrationtest.backend.IntegrationTestBackend
@@ -48,7 +48,7 @@ private[spark] class KubeConfigBackend(var context: String)
     // Clean up master URL which would have been specified in Spark format into a normal
     // K8S master URL
     masterUrl = checkAndGetK8sMasterUrl(masterUrl).replaceFirst("k8s://", "")
-    if (!StringUtils.equals(config.getMasterUrl, masterUrl)) {
+    if (!Strings.CS.equals(config.getMasterUrl, masterUrl)) {
       logInfo(s"Overriding K8S master URL ${config.getMasterUrl} from K8S config file " +
         s"with user specified master URL ${masterUrl}")
       config.setMasterUrl(masterUrl)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/expressions/expressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/expressions/expressions.scala
index 263508a9d5fb4..691a4691f81a9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/expressions/expressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/expressions/expressions.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.connector.expressions
 
 import org.apache.commons.codec.binary.Hex
-import org.apache.commons.lang3.StringUtils
+import org.apache.commons.lang3.Strings
 
 import org.apache.spark.SparkException
 import org.apache.spark.sql.catalyst
@@ -390,7 +390,7 @@ private[sql] object HoursTransform {
 
 private[sql] final case class LiteralValue[T](value: T, dataType: DataType) extends Literal[T] {
   override def toString: String = dataType match {
-    case StringType => s"'${StringUtils.replace(s"$value", "'", "''")}'"
+    case StringType => s"'${Strings.CS.replace(s"$value", "'", "''")}'"
     case BinaryType =>
       assert(value.isInstanceOf[Array[Byte]])
       val bytes = value.asInstanceOf[Array[Byte]]
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
index da0df734bbeca..d299e0c7e6366 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
@@ -26,7 +26,7 @@ import java.util.concurrent.TimeUnit
 import scala.collection.mutable.ArrayBuilder
 import scala.util.control.NonFatal
 
-import org.apache.commons.lang3.StringUtils
+import org.apache.commons.lang3.Strings
 
 import org.apache.spark.{SparkRuntimeException, SparkThrowable, SparkUnsupportedOperationException}
 import org.apache.spark.annotation.{DeveloperApi, Since}
@@ -352,7 +352,7 @@ abstract class JdbcDialect extends Serializable with Logging {
    */
   @Since("2.3.0")
   protected[jdbc] def escapeSql(value: String): String =
-    if (value == null) null else StringUtils.replace(value, "'", "''")
+    if (value == null) null else Strings.CS.replace(value, "'", "''")
 
   /**
    * Converts value to SQL expression.
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
index b74151a42e1af..f0ec491990da7 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
@@ -27,6 +27,7 @@
 import javax.security.sasl.AuthenticationException;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.Strings;
 import org.apache.hadoop.hive.conf.HiveConf;
 
 import org.apache.hive.service.ServiceUtils;
@@ -70,7 +71,7 @@ public void Authenticate(String user, String password) throws AuthenticationExce
       } else {
         String[] patterns = userDNPattern.split(":");
         for (String pattern : patterns) {
-          if (StringUtils.contains(pattern, ",") && StringUtils.contains(pattern, "=")) {
+          if (Strings.CS.contains(pattern, ",") && Strings.CS.contains(pattern, "=")) {
             candidatePrincipals.add(pattern.replaceAll("%s", user));
           }
         }
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
index 083d9c4a0d436..76a2ceb925ac6 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
@@ -27,7 +27,7 @@ import scala.jdk.CollectionConverters._
 import jline.console.ConsoleReader
 import jline.console.completer.{ArgumentCompleter, Completer, StringsCompleter}
 import jline.console.history.FileHistory
-import org.apache.commons.lang3.StringUtils
+import org.apache.commons.lang3.{Strings, StringUtils}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.hive.cli.{CliDriver, CliSessionState, OptionsProcessor}
 import org.apache.hadoop.hive.common.HiveInterruptUtils
@@ -572,7 +572,7 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
     val commands = splitSemiColon(line).asScala
     var command: String = ""
    for (oneCmd <- commands) {
-      if (StringUtils.endsWith(oneCmd, "\\")) {
+      if (Strings.CS.endsWith(oneCmd, "\\")) {
        command += StringUtils.chop(oneCmd) + ";"
       } else {
        command += oneCmd
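
Reviewer note: Strings.CS is the case-sensitive singleton that commons-lang3 provides as the
replacement for its deprecated StringUtils comparison helpers, so each call above keeps its
null-safe, case-sensitive semantics. Below is a minimal standalone sketch, not part of the
patch: the object name and sample values are illustrative, and it assumes a commons-lang3
release that ships org.apache.commons.lang3.Strings. It exercises the migrated methods:

import org.apache.commons.lang3.{Strings, StringUtils}

object StringsCsMigrationCheck {
  def main(args: Array[String]): Unit = {
    // equals: case-sensitive and null-safe, like the StringUtils.equals it replaces
    assert(Strings.CS.equals("k8s://host", "k8s://host"))
    assert(Strings.CS.equals(null, null))   // two nulls compare equal
    assert(!Strings.CS.equals("abc", null)) // a single null compares unequal

    // replace: the single-quote SQL escaping used by escapeSql and LiteralValue.toString
    assert(Strings.CS.replace("O'Brien", "'", "''") == "O''Brien")

    // contains / endsWith: the LDAP DN-pattern test and the CLI line-continuation test
    assert(Strings.CS.contains("cn=%s,dc=example,dc=com", "="))
    assert(Strings.CS.endsWith("SELECT 1\\", "\\"))

    // StringUtils.chop has no Strings counterpart, which is why SparkSQLCLIDriver
    // keeps the mixed import {Strings, StringUtils}
    assert(StringUtils.chop("SELECT 1\\") == "SELECT 1")
  }
}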