Commit 1570206
[SPARK-52849][CORE] Add stringifyException to o.a.s.util.Utils
### What changes were proposed in this pull request?

This PR aims to add `stringifyException` to `o.a.s.util.Utils`.

### Why are the changes needed?

We can use it reliably inside Spark without having to track Hadoop-side changes. In addition, this is a stable and straightforward method.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Pass the CIs.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #51540 from dongjoon-hyun/SPARK-52849.

Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent 35596e8 commit 1570206
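For reference, here is a minimal, self-contained sketch of what the new helper returns. The demo object and the thrown exception are illustrative only; the helper body mirrors the method added to `Utils` in the diff below:

```scala
import java.io.{PrintWriter, StringWriter}

object StringifyDemo {
  // Same technique as the new Utils.stringifyException: route
  // printStackTrace into an in-memory writer and return the text.
  def stringifyException(e: Throwable): String = {
    val stm = new StringWriter()
    val wrt = new PrintWriter(stm)
    e.printStackTrace(wrt)
    wrt.close()
    stm.toString()
  }

  def main(args: Array[String]): Unit = {
    val msg =
      try throw new IllegalStateException("boom")
      catch { case e: Throwable => stringifyException(e) }
    // The string begins with the class name and message, followed by
    // one "\tat ..." line per stack frame, exactly like printStackTrace.
    assert(msg.startsWith("java.lang.IllegalStateException: boom"))
    println(msg.linesIterator.take(3).mkString("\n"))
  }
}
```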

File tree

4 files changed (+21 −10 lines):

- core/src/main/scala/org/apache/spark/util/Utils.scala
- resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
- sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
- sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 13 additions & 2 deletions

```diff
@@ -59,7 +59,7 @@ import org.apache.hadoop.io.compress.{CompressionCodecFactory, SplittableCompres
 import org.apache.hadoop.ipc.{CallerContext => HadoopCallerContext}
 import org.apache.hadoop.ipc.CallerContext.{Builder => HadoopCallerContextBuilder}
 import org.apache.hadoop.security.UserGroupInformation
-import org.apache.hadoop.util.{RunJar, StringUtils}
+import org.apache.hadoop.util.RunJar
 import org.apache.hadoop.yarn.conf.YarnConfiguration
 import org.apache.logging.log4j.{Level, LogManager}
 import org.apache.logging.log4j.core.LoggerContext
@@ -435,7 +435,7 @@ private[spark] object Utils
     if (!source.exists()) {
       throw new FileNotFoundException(source.getAbsolutePath)
     }
-    val lowerSrc = StringUtils.toLowerCase(source.getName)
+    val lowerSrc = source.getName.toLowerCase(Locale.ENGLISH)
     if (lowerSrc.endsWith(".jar")) {
       RunJar.unJar(source, dest, RunJar.MATCH_ANY)
     } else if (lowerSrc.endsWith(".zip")) {
@@ -3166,6 +3166,17 @@ private[spark] object Utils
       "true".equals(useG1GC)
     }.getOrElse(false)
   }
+
+  /**
+   * Return a string of printStackTrace result.
+   */
+  def stringifyException(e: Throwable): String = {
+    val stm = new StringWriter()
+    val wrt = new PrintWriter(stm)
+    e.printStackTrace(wrt)
+    wrt.close()
+    stm.toString()
+  }
 }

 private[util] object CallerContext extends Logging {
```
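A side change in the same file inlines the removed Hadoop `StringUtils.toLowerCase` as `toLowerCase(Locale.ENGLISH)`, preserving its locale-insensitive behavior. The explicit locale matters: default-locale lowercasing can break extension checks under, for example, a Turkish default locale. A small sketch (the file name is made up for illustration):

```scala
import java.util.Locale

object LocaleLowercaseDemo {
  def main(args: Array[String]): Unit = {
    val name = "ARCHIVE.ZIP"
    // Locale-insensitive lowercasing: the extension check behaves the
    // same on every JVM, regardless of the user's default locale.
    println(name.toLowerCase(Locale.ENGLISH).endsWith(".zip"))  // true

    // Under Turkish casing rules, 'I' lowercases to dotless 'ı'
    // ("archıve.zıp"), so the same check silently fails.
    println(name.toLowerCase(Locale.forLanguageTag("tr")).endsWith(".zip"))  // false
  }
}
```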

resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala

Lines changed: 4 additions & 5 deletions

```diff
@@ -31,7 +31,6 @@ import scala.util.control.NonFatal
 import org.apache.commons.lang3.{StringUtils => ComStrUtils}
 import org.apache.hadoop.fs.{FileSystem, Path}
 import org.apache.hadoop.security.UserGroupInformation
-import org.apache.hadoop.util.StringUtils
 import org.apache.hadoop.yarn.api._
 import org.apache.hadoop.yarn.api.records._
 import org.apache.hadoop.yarn.conf.YarnConfiguration
@@ -272,7 +271,7 @@ private[spark] class ApplicationMaster(
       logError("Uncaught exception: ", e)
       finish(FinalApplicationStatus.FAILED,
         ApplicationMaster.EXIT_UNCAUGHT_EXCEPTION,
-        "Uncaught exception: " + StringUtils.stringifyException(e))
+        "Uncaught exception: " + Utils.stringifyException(e))
     } finally {
       try {
         metricsSystem.foreach { ms =>
@@ -315,7 +314,7 @@ private[spark] class ApplicationMaster(
       logError("Uncaught exception: ", e)
       finish(FinalApplicationStatus.FAILED,
         ApplicationMaster.EXIT_UNCAUGHT_EXCEPTION,
-        "Uncaught exception: " + StringUtils.stringifyException(e))
+        "Uncaught exception: " + Utils.stringifyException(e))
       if (!unregistered) {
         // It's ok to clean staging dir first because unmanaged AM can't be retried.
         cleanupStagingDir(stagingDir)
@@ -592,7 +591,7 @@ private[spark] class ApplicationMaster(
           if (!NonFatal(e)) {
             finish(FinalApplicationStatus.FAILED,
               ApplicationMaster.EXIT_REPORTER_FAILURE,
-              "Fatal exception: " + StringUtils.stringifyException(e))
+              "Fatal exception: " + Utils.stringifyException(e))
           } else if (failureCount >= reporterMaxFailures) {
             finish(FinalApplicationStatus.FAILED,
               ApplicationMaster.EXIT_REPORTER_FAILURE, "Exception was thrown " +
@@ -758,7 +757,7 @@ private[spark] class ApplicationMaster(
         logError("User class threw exception: ", cause)
         finish(FinalApplicationStatus.FAILED,
           ApplicationMaster.EXIT_EXCEPTION_USER_CLASS,
-          "User class threw exception: " + StringUtils.stringifyException(cause))
+          "User class threw exception: " + Utils.stringifyException(cause))
       }
       sparkContextPromise.tryFailure(e.getCause())
     } finally {
```
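One of the hunks above gates the reporter's failure handling on `!NonFatal(e)`. As a reminder of the semantics, `scala.util.control.NonFatal` matches ordinary exceptions but not VM-level errors; a quick sketch (the throwables below are illustrative only):

```scala
import scala.util.control.NonFatal

object NonFatalDemo {
  // Mirrors the reporter check: only throwables NOT matched by NonFatal
  // (VirtualMachineError, InterruptedException, LinkageError, ...) take
  // the immediate "Fatal exception" path.
  def isFatal(e: Throwable): Boolean = !NonFatal(e)

  def main(args: Array[String]): Unit = {
    println(isFatal(new RuntimeException("transient")))  // false: retried
    println(isFatal(new OutOfMemoryError()))             // true: fatal path
  }
}
```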

sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java

Lines changed: 2 additions & 2 deletions

```diff
@@ -46,6 +46,7 @@
 import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
+import org.apache.spark.util.Utils;

 /**
  * ThriftCLIService.
@@ -593,8 +594,7 @@ public TGetOperationStatusResp GetOperationStatus(TGetOperationStatusReq req) th
       if (opException != null) {
         resp.setSqlState(opException.getSQLState());
         resp.setErrorCode(opException.getErrorCode());
-        resp.setErrorMessage(org.apache.hadoop.util.StringUtils
-            .stringifyException(opException));
+        resp.setErrorMessage(Utils.stringifyException(opException));
       }
       resp.setStatus(OK_STATUS);
     } catch (Exception e) {
```
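Note that `ThriftCLIService` is Java code calling a Scala `object`: this compiles because scalac emits static forwarder methods for top-level objects, so `Utils.stringifyException(...)` is a plain static call from Java's point of view (and `private[spark]` visibility is not enforced in bytecode, which is why the cross-package call works).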

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala

Lines changed: 2 additions & 1 deletion

```diff
@@ -52,6 +52,7 @@ import org.apache.spark.sql.internal.{SharedState, SQLConf}
 import org.apache.spark.sql.internal.SQLConf.LEGACY_EMPTY_CURRENT_DB_IN_CLI
 import org.apache.spark.util.ShutdownHookManager
 import org.apache.spark.util.SparkExitCode._
+import org.apache.spark.util.Utils

 /**
  * This code doesn't support remote connections in Hive 1.2+, as the underlying CliDriver
@@ -504,7 +505,7 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
       case e: IOException =>
         console.printError(
           s"""Failed with exception ${e.getClass.getName}: ${e.getMessage}
-             |${org.apache.hadoop.util.StringUtils.stringifyException(e)}
+             |${Utils.stringifyException(e)}
           """.stripMargin)
         ret = 1
     }
```
