in spark/common/src/main/java/org/apache/sedona/viz/showcase/Example.java [359:453]
public static void main(String[] args) throws IOException {
  SparkConf sparkConf = new SparkConf().setAppName("SedonaVizDemo").setMaster("local[4]");
  sparkContext = new JavaSparkContext(sparkConf);
  Logger.getLogger("org").setLevel(Level.WARN);
  Logger.getLogger("akka").setLevel(Level.WARN);
  prop = new Properties();
  String resourcePath = "/../spark/common/src/test/resources/";
  String demoOutputPath = "target/demo";
  // Resolve the properties file against the working directory, matching how resourcePath
  // is combined with user.dir everywhere else in this method.
  FileInputStream ConfFile =
      new FileInputStream(
          System.getProperty("user.dir") + resourcePath + "babylon.point.properties");
  prop.load(ConfFile);
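  // Every demo writes its output under target/demo in the current working directory.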
  String scatterPlotOutputPath =
      System.getProperty("user.dir") + "/" + demoOutputPath + "/scatterplot";
  String heatMapOutputPath = System.getProperty("user.dir") + "/" + demoOutputPath + "/heatmap";
  String choroplethMapOutputPath =
      System.getProperty("user.dir") + "/" + demoOutputPath + "/choroplethmap";
  String parallelFilterRenderStitchOutputPath =
      System.getProperty("user.dir")
          + "/"
          + demoOutputPath
          + "/parallelfilterrenderstitchheatmap";
  String earthdataScatterPlotOutputPath =
      System.getProperty("user.dir") + "/" + demoOutputPath + "/earthdatascatterplot";
  PointInputLocation =
      "file://"
          + System.getProperty("user.dir")
          + "/"
          + resourcePath
          + prop.getProperty("inputLocation");
  PointOffset = Integer.parseInt(prop.getProperty("offset"));
  PointSplitter = FileDataSplitter.getFileDataSplitter(prop.getProperty("splitter"));
  PointNumPartitions = Integer.parseInt(prop.getProperty("numPartitions"));
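  // Rectangle dataset settings from babylon.rectangle.properties.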
  ConfFile =
      new FileInputStream(
          System.getProperty("user.dir") + resourcePath + "babylon.rectangle.properties");
  prop.load(ConfFile);
  RectangleInputLocation =
      "file://"
          + System.getProperty("user.dir")
          + "/"
          + resourcePath
          + prop.getProperty("inputLocation");
  RectangleOffset = Integer.parseInt(prop.getProperty("offset"));
  RectangleSplitter = FileDataSplitter.getFileDataSplitter(prop.getProperty("splitter"));
  RectangleNumPartitions = Integer.parseInt(prop.getProperty("numPartitions"));
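  // Polygon dataset settings from babylon.polygon.properties.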
  ConfFile =
      new FileInputStream(
          System.getProperty("user.dir") + resourcePath + "babylon.polygon.properties");
  prop.load(ConfFile);
  PolygonInputLocation =
      "file://"
          + System.getProperty("user.dir")
          + "/"
          + resourcePath
          + prop.getProperty("inputLocation");
  PolygonOffset = Integer.parseInt(prop.getProperty("offset"));
  PolygonSplitter = FileDataSplitter.getFileDataSplitter(prop.getProperty("splitter"));
  PolygonNumPartitions = Integer.parseInt(prop.getProperty("numPartitions"));
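  // LineString dataset settings from babylon.linestring.properties.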
  ConfFile =
      new FileInputStream(
          System.getProperty("user.dir") + resourcePath + "babylon.linestring.properties");
  prop.load(ConfFile);
  LineStringInputLocation =
      "file://"
          + System.getProperty("user.dir")
          + "/"
          + resourcePath
          + prop.getProperty("inputLocation");
  LineStringOffset = Integer.parseInt(prop.getProperty("offset"));
  LineStringSplitter = FileDataSplitter.getFileDataSplitter(prop.getProperty("splitter"));
  LineStringNumPartitions = Integer.parseInt(prop.getProperty("numPartitions"));
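  // Rough bounding box of the contiguous United States (west/east longitude, south/north
  // latitude), presumably used by the demo builders as the plotting extent.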
  USMainLandBoundary = new Envelope(-126.790180, -64.630926, 24.863836, 50.000);
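  // Earthdata (MODIS) demo settings: the modis.csv input, the HDF swath group and its
  // LST (land surface temperature) variable, and whether to swap the X/Y axes when reading.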
  earthdataInputLocation =
      System.getProperty("user.dir") + "/../spark/common/src/test/resources/modis/modis.csv";
  earthdataNumPartitions = 5;
  HDFIncrement = 5;
  HDFOffset = 2;
  HDFRootGroupName = "MOD_Swath_LST";
  HDFDataVariableName = "LST";
  HDFswitchXY = true;
  urlPrefix = System.getProperty("user.dir") + "/../spark/common/src/test/resources/modis/";
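  // Run each demo in sequence; the && chain short-circuits at the first failure.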
  if (buildScatterPlot(scatterPlotOutputPath)
      && buildHeatMap(heatMapOutputPath)
      && buildChoroplethMap(choroplethMapOutputPath)
      && parallelFilterRenderStitch(parallelFilterRenderStitchOutputPath + "-stitched")
      && parallelFilterRenderNoStitch(parallelFilterRenderStitchOutputPath)
      && earthdataVisualization(earthdataScatterPlotOutputPath)) {
    System.out.println("All 5 Demos have passed.");
  } else {
    System.out.println("Demos failed.");
  }
  sparkContext.stop();
}