public static void main()

in spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkLoadExample.java [52:89]


  public static void main(String[] args) {
    if (args.length < 1) {
      System.out.println("JavaHBaseBulkLoadExample  " + "{outputPath}");
      return;
    }

    String tableName = "bulkload-table-test";
    String columnFamily1 = "f1";
    String columnFamily2 = "f2";

    SparkConf sparkConf = new SparkConf().setAppName("JavaHBaseBulkLoadExample " + tableName);
    JavaSparkContext jsc = new JavaSparkContext(sparkConf);

    try {
      // Sample cells in "rowKey,columnFamily,qualifier,value" form.
      List<String> list = new ArrayList<>();
      // row1
      list.add("1," + columnFamily1 + ",b,1");
      // row3
      list.add("3," + columnFamily1 + ",a,2");
      list.add("3," + columnFamily1 + ",b,1");
      list.add("3," + columnFamily2 + ",a,1");
      // row2
      list.add("2," + columnFamily2 + ",a,3");
      list.add("2," + columnFamily2 + ",b,3");

      JavaRDD<String> rdd = jsc.parallelize(list);

      // Wrap the Spark context with the HBase configuration.
      Configuration conf = HBaseConfiguration.create();
      JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);

      // Write HFiles for the RDD under args[0]; BulkLoadFunction turns each
      // input string into a (KeyFamilyQualifier, value) pair.
      hbaseContext.bulkLoad(rdd, TableName.valueOf(tableName), new BulkLoadFunction(), args[0],
          new HashMap<byte[], FamilyHFileWriteOptions>(), false, HConstants.DEFAULT_MAX_FILE_SIZE);
    } finally {
      jsc.stop();
    }
  }
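
The BulkLoadFunction passed to bulkLoad is presumably defined elsewhere in the same file, outside the excerpted [52:89] range. Below is a minimal sketch of such a mapper, assuming the "rowKey,columnFamily,qualifier,value" string format built in main(); KeyFamilyQualifier, Pair, and Bytes are taken from the hbase-spark and hbase-common modules, and the exact signature may differ from the project's actual implementation.

import org.apache.hadoop.hbase.spark.KeyFamilyQualifier;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.spark.api.java.function.Function;

// Maps each "rowKey,columnFamily,qualifier,value" string to the
// (KeyFamilyQualifier, value) pair that bulkLoad expects when writing HFiles.
public static class BulkLoadFunction
    implements Function<String, Pair<KeyFamilyQualifier, byte[]>> {
  @Override
  public Pair<KeyFamilyQualifier, byte[]> call(String v1) throws Exception {
    if (v1 == null) {
      return null;
    }
    String[] cells = v1.split(",");
    if (cells.length != 4) {
      return null; // ignore malformed lines
    }
    KeyFamilyQualifier kfq = new KeyFamilyQualifier(Bytes.toBytes(cells[0]),
        Bytes.toBytes(cells[1]), Bytes.toBytes(cells[2]));
    return new Pair<>(kfq, Bytes.toBytes(cells[3]));
  }
}

Note that bulkLoad only writes HFiles to the staging directory given by args[0]; to make the data queryable, those files still have to be handed to the region servers, for example with HBase's completebulkload tool run against the output path and the bulkload-table-test table.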