in src/main/java/com/aliyun/emr/example/spark/SparkOssJavaDemo.java [29:53]
// Imports this snippet relies on (declared at the top of the file):
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;

public static void main(String[] args) {
    // Positional arguments: OSS credentials, endpoint, input/output paths, partition count.
    String accessId = args[0];
    String accessKey = args[1];
    String endpoint = args[2];
    String inputPath = args[3];
    String outputPath = args[4];
    int partition = Integer.parseInt(args[5]);
    SparkConf sparkConf = new SparkConf()
            .setAppName("E-MapReduce Demo 2-2: Spark Oss Demo (Java)")
            .setMaster("local[4]"); // force local mode with 4 threads; this demo does not need a cluster

    // Hand the OSS credentials, endpoint, and filesystem implementation through to Hadoop via the spark.hadoop.* prefix.
    sparkConf.set("spark.hadoop.fs.oss.accessKeyId", accessId);
    sparkConf.set("spark.hadoop.fs.oss.accessKeySecret", accessKey);
    sparkConf.set("spark.hadoop.fs.oss.endpoint", endpoint);
    sparkConf.set("spark.hadoop.fs.oss.impl", "com.aliyun.fs.oss.nat.NativeOssFileSystem");
    sparkConf.set("spark.hadoop.mapreduce.job.run-local", "true");
    JavaSparkContext jsc = new JavaSparkContext(sparkConf);

    // Read the OSS input as (byte offset, line) pairs via the classic mapred TextInputFormat.
    JavaPairRDD<LongWritable, Text> data = jsc.hadoopFile(inputPath, TextInputFormat.class, LongWritable.class, Text.class, partition);
    System.out.println("Count (data): " + data.count());

    // Each (offset, line) pair is written using its toString(), e.g. "(0,first line)".
    data.saveAsTextFile(outputPath);
    jsc.stop();
}
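
// A hypothetical invocation, for reference. All values below are placeholders,
// not taken from the original source; substitute your own credentials, endpoint,
// bucket paths, and partition count:
//
//   spark-submit --class com.aliyun.emr.example.spark.SparkOssJavaDemo \
//       examples.jar <accessKeyId> <accessKeySecret> oss-cn-hangzhou.aliyuncs.com \
//       oss://my-bucket/input oss://my-bucket/output 16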