in spark/src/main/java/org/apache/accumulo/spark/CopyPlus5K.java [71:108]
/**
 * Resets all state from previous runs so the example starts clean: removes {@code rootPath} from
 * HDFS, drops and recreates the input and output Accumulo tables, and seeds the input table with
 * 100 rows (row ids {@code 000}-{@code 099}, family {@code cf1}, qualifier {@code cq1}, value =
 * the row index).
 *
 * @param props Accumulo client connection properties used to build the {@link AccumuloClient}
 * @throws Exception if HDFS or Accumulo operations fail
 */
private static void cleanupAndCreateTables(Properties props) throws Exception {
  FileSystem hdfs = FileSystem.get(new Configuration());
  // Remove any output left behind by a previous run.
  if (hdfs.exists(rootPath)) {
    hdfs.delete(rootPath, true);
  }
  try (AccumuloClient client = Accumulo.newClient().from(props).build()) {
    // Drop both tables if they exist so the subsequent creates start from scratch.
    if (client.tableOperations().exists(inputTable)) {
      client.tableOperations().delete(inputTable);
    }
    if (client.tableOperations().exists(outputTable)) {
      client.tableOperations().delete(outputTable);
    }
    // Create tables. TableExistsException here means the delete above raced with another
    // process recreating the table — treat as fatal for this example and bail out.
    try {
      client.tableOperations().create(inputTable);
    } catch (TableExistsException e) {
      log.error("Something went wrong. Table '{}' should have been deleted prior to creation "
          + "attempt!", inputTable);
      return;
    }
    try {
      client.tableOperations().create(outputTable);
    } catch (TableExistsException e) {
      // Fixed: this message previously logged inputTable, misreporting which create failed.
      log.error("Something went wrong. Table '{}' should have been deleted prior to creation "
          + "attempt!", outputTable);
      return;
    }
    // Write data to input table; BatchWriter flushes and closes via try-with-resources.
    try (BatchWriter bw = client.createBatchWriter(inputTable)) {
      for (int i = 0; i < 100; i++) {
        Mutation m = new Mutation(String.format("%03d", i));
        m.at().family("cf1").qualifier("cq1").put("" + i);
        bw.addMutation(m);
      }
    }
  }
}