in src/main/scala/org/apache/spark/shuffle/rss/RssSparkListener.scala [90:123]
/**
 * Spark listener callback fired when a job completes.
 *
 * Two responsibilities: (1) bump the failed-job counter when the job failed,
 * and (2) report the job's final status (plus exception name/stack trace, if
 * any) to a randomly chosen notify server via `invokeRandomNotifyServer`.
 *
 * Metric bookkeeping is wrapped so a counter failure never breaks the
 * listener; only non-fatal errors are swallowed.
 */
override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = {
  import scala.util.control.NonFatal

  try {
    jobEnd.jobResult match {
      // Type pattern: a null jobResult simply falls through to the default case.
      case _: JobFailed => numFailedInAppJobs.inc(1)
      case _            => // succeeded (or no result): nothing to count
    }
  } catch {
    // NonFatal (not Throwable): let OOM / interrupts / fatal errors propagate.
    case NonFatal(e) => logWarning("Failed to run onJobEnd", e)
  }

  // Only notify when servers are configured; guard replaces an early return.
  if (notifyServers != null && notifyServers.length != 0) {
    invokeRandomNotifyServer { client =>
      val jobResult = jobEnd.jobResult
      // Empty strings signal "not applicable" to the notify server.
      val jobStatus =
        if (jobResult != null) jobResult.getClass.getSimpleName else ""
      val (exceptionName, exceptionDetail) = jobResult match {
        case JobFailed(ex) if ex != null =>
          (ex.getClass.getSimpleName, ExceptionUtils.getStackTrace(ex))
        case _ =>
          ("", "")
      }
      client.finishApplicationJob(
        appId, attemptId, jobEnd.jobId, jobStatus, exceptionName, exceptionDetail)
    }
  }
}