in repl/scala-2.12/src/main/scala/org/apache/livy/repl/SparkInterpreter.scala [41:87]
override def start(): Unit = {
  require(sparkILoop == null)

  // Temp directory that receives the classes the REPL compiles; cleaned up on JVM exit.
  val replClassRoot = conf.get("spark.repl.classdir", System.getProperty("java.io.tmpdir"))
  val replOutputDir = Files.createTempDirectory(Paths.get(replClassRoot), "spark").toFile
  replOutputDir.deleteOnExit()
  conf.set("spark.repl.class.outputDir", replOutputDir.getAbsolutePath)

  // Compiler settings: class-based REPL wrappers, output directory, JVM classpath.
  val compilerSettings = new Settings()
  compilerSettings.processArguments(
    List("-Yrepl-class-based", "-Yrepl-outdir", replOutputDir.getAbsolutePath), true)
  compilerSettings.usejavacp.value = true
  compilerSettings.embeddedDefaults(Thread.currentThread().getContextClassLoader())

  sparkILoop = new SparkILoop(None, new JPrintWriter(outputStream, true))
  sparkILoop.settings = compilerSettings
  sparkILoop.createInterpreter()
  sparkILoop.initializeSynchronous()

  restoreContextClassLoader {
    // Touch these to force the interpreter's classpath and class loader to initialize.
    sparkILoop.compilerClasspath
    sparkILoop.ensureClassLoader

    // Walk up the context class-loader parent chain and stop at the first
    // Spark MutableURLClassLoader, which carries the user-supplied extra jars.
    val sparkLoader = Iterator
      .iterate(Thread.currentThread().getContextClassLoader)(_.getParent)
      .takeWhile(_ != null)
      .find(_.getClass.getCanonicalName == "org.apache.spark.util.MutableURLClassLoader")

    sparkLoader.foreach { loader =>
      val extraJarPath = loader.asInstanceOf[URLClassLoader].getURLs()
        // Check if the file exists. Otherwise an exception will be thrown.
        .filter { u => u.getProtocol == "file" && new File(u.getPath).isFile }
        // Livy rsc and repl are also in the extra jars list. Filter them out.
        .filterNot { u => Paths.get(u.toURI).getFileName.toString.startsWith("livy-") }
        // Some bad spark packages depend on the wrong version of scala-reflect. Blacklist it.
        .filterNot { u =>
          Paths.get(u.toURI).getFileName.toString.contains("org.scala-lang_scala-reflect")
        }
      extraJarPath.foreach { p => debug(s"Adding $p to Scala interpreter's class path...") }
      sparkILoop.addUrlsToClassPath(extraJarPath: _*)
    }

    postStart()
  }
}