in spark-utils/src/main/java/com/aliyun/odps/spark/SparkLauncherTest.java [25:59]
/**
 * Launches a Spark application through {@link SparkLauncher}, kills it once it reaches
 * the {@code RUNNING} state (to exercise the kill path), and blocks until the
 * application reaches a terminal state.
 *
 * @param args command-line arguments (unused)
 * @throws Exception if starting the application or awaiting the latch fails
 */
public static void main(String[] args) throws Exception {
Map<String, String> env = new HashMap<>();
// TODO: replace with your local Spark installation directory
env.put("SPARK_HOME", "/Users/wusj/software/spark/spark-2.3.0-odps0.33.0");
// Released once the application reaches a final state, so main() can return.
CountDownLatch countDownLatch = new CountDownLatch(1);
SparkLauncher launcher = new SparkLauncher(env);
launcher.setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path"))
.setConf("spark.hadoop.odps.access.id", accessId)
.setConf("spark.hadoop.odps.access.key", accessKey)
.setConf("spark.hadoop.odps.project.name", projectName)
.setConf("spark.hadoop.odps.end.point", endPoint)
.setMainClass("JavaSparkPi")
// TODO: replace with the path to your application jar
.setAppResource("/Users/wusj/code/spark/test.jar")
.setMaster("yarn")
.setDeployMode("cluster")
.startApplication(new SparkAppHandle.Listener(){
@Override
public void stateChanged(SparkAppHandle handle){
System.out.println("State changed to:" + handle.getState().toString());
// Enum constants are compared with ==; guard against a null app id,
// which the handle may report before the cluster has assigned one.
if (handle.getState() == SparkAppHandle.State.RUNNING && handle.getAppId() != null) {
// Test kill application
killApplication(handle.getAppId());
}
if (handle.getState().isFinal()) {
countDownLatch.countDown();
}
}
@Override
public void infoChanged(SparkAppHandle handle) {
// No-op: launch info updates are not relevant to this test.
}
});
countDownLatch.await();
}