in spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala [164:205]
  def createTable(): Unit = {
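    // Number of regions to pre-split the new table into, taken from the
    // HBaseTableCatalog.newTable option; defaults to 0 when the option is absent.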
    val numReg = parameters
      .get(HBaseTableCatalog.newTable)
      .map(x => x.toInt)
      .getOrElse(0)
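    // First and last row keys of the pre-split range, falling back to the
    // catalog defaults when the options are not supplied.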
    val startKey = Bytes.toBytes(
      parameters
        .get(HBaseTableCatalog.regionStart)
        .getOrElse(HBaseTableCatalog.defaultRegionStart))
    val endKey = Bytes.toBytes(
      parameters
        .get(HBaseTableCatalog.regionEnd)
        .getOrElse(HBaseTableCatalog.defaultRegionEnd))
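    // Pre-split only when more than three regions are requested: the split
    // computation below needs at least one intermediate split point.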
    if (numReg > 3) {
      val tName = TableName.valueOf(tableName)
      val cfs = catalog.getColumnFamilies
      val connection = HBaseConnectionCache.getConnection(hbaseConf)
      // Initialize the HBase table if necessary
      val admin = connection.getAdmin
      try {
        if (!admin.tableExists(tName)) {
          val tableDesc = new HTableDescriptor(tName)
          cfs.foreach { x =>
            // Bytes.toBytes avoids the platform-default charset of String.getBytes.
            val cf = new HColumnDescriptor(Bytes.toBytes(x))
            logDebug(s"add family $x to $tableName")
            tableDesc.addFamily(cf)
          }
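          // Bytes.split(start, end, n) returns n + 2 boundary keys (both
          // endpoints included) and createTable makes one region per split
          // key plus one, so passing numReg - 3 yields exactly numReg
          // regions; the numReg > 3 guard keeps that argument positive.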
          val splitKeys = Bytes.split(startKey, endKey, numReg - 3)
          admin.createTable(tableDesc, splitKeys)
        }
      } finally {
        admin.close()
        connection.close()
      }
    } else {
      logInfo(s"""${HBaseTableCatalog.newTable}
                 |is not defined or not larger than 3; skipping table creation""".stripMargin)
    }
  }
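
  // Usage sketch (illustration only, not part of this file): roughly how a
  // Spark job reaches this code path, following the write pattern from the
  // HBase reference guide. Here `catalog` is assumed to be a JSON table
  // definition for HBaseTableCatalog, and newTable -> "5" asks createTable()
  // above to pre-split the new table into five regions.
  //
  //   val df: DataFrame = ...
  //   df.write
  //     .options(Map(
  //       HBaseTableCatalog.tableCatalog -> catalog,
  //       HBaseTableCatalog.newTable -> "5"))
  //     .format("org.apache.hadoop.hbase.spark")
  //     .save()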