private def concatCmdOptions()

in scala/src/main/org/apache/spark/launcher/SparkCLRSubmitArguments.scala [391:447]


  /**
   * Appends SparkCLR-specific options to the accumulated `cmd` string based on
   * the target `master` and `deployMode`.
   *
   * Side effects (mutates enclosing-class state — fields assumed declared on the
   * enclosing SparkCLRSubmitArguments class; confirm against full file):
   *  - `cmd`: extended with `--name`, `--jars`, `--files`, `--class` and the
   *    driver executable / user arguments.
   *  - `files`: the primary resource (or its zipped form) is appended so it is
   *    shipped to the cluster.
   *
   * Branching:
   *  - null / local master: delegates to `concatLocalCmdOptions()`.
   *  - spark:// master in cluster mode: driver runs remotely, so the primary
   *    resource is shipped via --files and referenced by its remote path.
   *  - everything else (e.g. yarn): the primary resource is zipped first, then
   *    the runner class and executable are appended per deploy mode.
   */
  private def concatCmdOptions(): Unit = {

    // Default the application name to the driver executable (minus ".exe")
    // when the user did not pass --name explicitly.
    if (appName == null) cmd = cmd.trim + " --name " + mainExecutable.stripSuffix(".exe")

    master match {

      case m if m == null || m.startsWith("local") => concatLocalCmdOptions()

      case m if m.toLowerCase.startsWith("spark://") && deployMode == "cluster" =>
        val remoteDriverPath = primaryResource
        files = files match {
          case null => remoteDriverPath
          case _ => files + ("," + remoteDriverPath)
        }

        if (jars != null && jars.nonEmpty) cmd += s" --jars $jars"

        cmd += (s" --files $files --class $csharpRunnerClass $remoteSparkCLRJarPath" +
          s" $remoteDriverPath $mainExecutable")
        // FIX: was `childArgs.length > 1`, which silently dropped the user's
        // arguments when exactly one was supplied; the general branch below
        // uses `nonEmpty`, so align both branches on that check.
        if (childArgs.nonEmpty) cmd += (" " + childArgs.mkString(" "))

      case _ =>

        if (jars != null && jars.nonEmpty) cmd = cmd.trim + s" --jars $jars"

        findMainExecutable()
        val zippedPrimaryResource: File = zipPrimaryResource()

        files match {
          case null => files = zippedPrimaryResource.getPath
          case _ => files += ("," + zippedPrimaryResource.getPath)
        }

        // `files` is guaranteed non-null here: both arms of the match above
        // assign it, so the former `if (files != null)` guard was dead code.
        cmd += s" --files $files"

        deployMode match {

          case "client" =>
            // Client mode: driver runs locally, so use the local jar path and
            // the original (unzipped) primary resource.
            cmd += (s" --class $csharpRunnerClass $sparkCLRJarPath " + primaryResource)

          case "cluster" =>
            // Cluster mode: reference the zipped resource by bare name — it is
            // distributed via --files and resolved in the container's cwd.
            cmd += (s" --class $csharpRunnerClass $sparkCLRJarPath "
              + zippedPrimaryResource.getName)

          case _ =>
        }

        if (mainExecutable != null) cmd += s" $mainExecutable"

        if (childArgs.nonEmpty) cmd += (" " + childArgs.mkString(" "))

    }
  }