in scala/src/main/org/apache/spark/deploy/csharp/CSharpRunner.scala [29:143]
def main(args: Array[String]): Unit = {
  if (args.length == 0) {
    throw new IllegalArgumentException("At least one argument is expected for CSharpRunner")
  }
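  // parse runner settings (a (debugMode, backendPort) tuple) from the command-line arguments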
  val runnerSettings = initializeCSharpRunnerSettings(args)
  // determines whether CSharpBackend needs to be run in debug mode
  // in debug mode this runner will not launch the C# process
  val runInDebugMode = runnerSettings._1
  @volatile var csharpBackendPortNumber = runnerSettings._2
  var csharpExecutable = ""
  var otherArgs: Array[String] = null
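  // Resolve the C# driver executable and the remaining arguments to pass to it,
  // based on how the application was submitted (zip archive, directory, or bare executable)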
  if (!runInDebugMode) {
    if (args(0).toLowerCase.endsWith(".zip")) {
      var zipFileName = args(0)
      val driverDir = new File("").getAbsoluteFile
      if (zipFileName.toLowerCase.startsWith("hdfs://")) {
        // standalone cluster mode, need to download the zip file from hdfs.
        zipFileName = downloadDriverFile(zipFileName, driverDir.getAbsolutePath).getName
      }
      logInfo(s"Unzipping driver $zipFileName in $driverDir")
      CSharpSparkUtils.unzip(new File(zipFileName), driverDir)
      // reusing windows-specific formatting in PythonRunner
      csharpExecutable = PythonRunner.formatPath(args(1))
      otherArgs = args.slice(2, args.length)
    } else if (new File(args(0)).isDirectory) {
      // In local mode, no zip file is generated when a directory is given,
      // so skip decompression in this case
      // reusing windows-specific formatting in PythonRunner
      csharpExecutable = PythonRunner.formatPath(args(1))
      otherArgs = args.slice(2, args.length)
    } else {
      csharpExecutable = PythonRunner.formatPath(args(0))
      otherArgs = args.slice(1, args.length)
    }
  } else {
    otherArgs = args.slice(1, args.length)
  }
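  // Assemble the command line for the C# driver process: the executable path followed by
  // the remaining user arguments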
  val processParameters = new java.util.ArrayList[String]
  processParameters.add(formatPath(csharpExecutable))
  otherArgs.foreach(arg => processParameters.add(arg))
  logInfo("Starting CSharpBackend!")
  // Time to wait for CSharpBackend to initialize in seconds
  val backendTimeout = sys.env.getOrElse("CSHARPBACKEND_TIMEOUT", "120").toInt
  // Launch a SparkCLR backend server for the C# process to connect to; this will let it see our
  // Java system properties etc.
  val csharpBackend = new CSharpBackend()
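  // Released by the backend thread once the backend has initialized and its port number is known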
  val initialized = new Semaphore(0)
  val csharpBackendThread = new Thread("CSharpBackend") {
    override def run(): Unit = {
      // need to get back csharpBackendPortNumber because if the value passed to init is 0
      // the port number is dynamically assigned in the backend
      csharpBackendPortNumber = csharpBackend.init(csharpBackendPortNumber)
      logInfo(s"Port number used by CSharpBackend is $csharpBackendPortNumber")
      initialized.release()
      csharpBackend.run()
    }
  }
  csharpBackendThread.start()
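  // Wait for the backend to finish initializing before launching the C# driver process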
  if (initialized.tryAcquire(backendTimeout, TimeUnit.SECONDS)) {
    if (!runInDebugMode) {
      var returnCode = -1
      try {
        val builder = new ProcessBuilder(processParameters)
        val env = builder.environment()
        env.put("CSHARPBACKEND_PORT", csharpBackendPortNumber.toString)
        for ((key, value) <- Utils.getSystemProperties if key.startsWith("spark.")) {
          env.put(key, value)
          logInfo(s"Adding key=$key and value=$value to environment")
        }
        builder.redirectErrorStream(true) // Ugly but needed for stdout and stderr to synchronize
        val process = builder.start()
        // Redirect stdin of JVM process to stdin of C# process
        new RedirectThread(System.in, process.getOutputStream, "redirect JVM input").start()
        // Redirect stdout and stderr of C# process
        new RedirectThread(process.getInputStream, System.out, "redirect CSharp stdout").start()
        new RedirectThread(process.getErrorStream, System.out, "redirect CSharp stderr").start()
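        // Block until the C# driver process exits, then shut down the backend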
        returnCode = process.waitFor()
        closeBackend(csharpBackend)
      } catch {
        case t: Throwable =>
          logError(s"${t.getMessage} \n ${t.getStackTrace.mkString("\n")}")
      }
      logInfo(s"Return code from C# process: $returnCode")
      CSharpSparkUtils.exit(returnCode)
    } else {
      // scalastyle:off println
      println("***********************************************************************")
      println("* [CSharpRunner.main] Backend running debug mode. Press enter to exit *")
      println("***********************************************************************")
      // scalastyle:on println
      Console.readLine()
      closeBackend(csharpBackend)
      CSharpSparkUtils.exit(0)
    }
  } else {
    logError(s"CSharpBackend did not initialize in $backendTimeout seconds")
    CSharpSparkUtils.exit(-1)
  }
}