in tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java [311:418]
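/**
 * Adds a new table to the Hive metastore. Maps the given Tajo table descriptor onto a Hive
 * {@code Table} with a {@code StorageDescriptor}, translating the schema, optional column
 * partition keys, and the RCFILE or CSV store type into the matching Hive formats and
 * serde options.
 */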
public final void addTable(final CatalogProtos.TableDescProto tableDescProto) throws CatalogException {
  String dbName = null, tableName = null;
  Pair<String, String> tablePair = null;
  HCatalogStoreClientPool.HCatalogStoreClient client = null;
  TableDesc tableDesc = new TableDesc(tableDescProto);

  // Get the database name and table name.
  try {
    tablePair = HCatUtil.getDbAndTableName(tableDesc.getName());
    dbName = tablePair.first;
    tableName = tablePair.second;
  } catch (Exception e) {
    throw new CatalogException("Table name is wrong.", e);
  }
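  // Borrow a metastore client from the pool and register the table with Hive.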
  try {
    client = clientPool.getClient();

    org.apache.hadoop.hive.metastore.api.Table table = new org.apache.hadoop.hive.metastore.api.Table();
    table.setDbName(dbName);
    table.setTableName(tableName);
    // TODO: set owner
    //table.setOwner();
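    // Pre-initialize the parameter maps so that serde options can be added below without null checks.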
    StorageDescriptor sd = new StorageDescriptor();
    sd.setParameters(new HashMap<String, String>());
    sd.setSerdeInfo(new SerDeInfo());
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    sd.getSerdeInfo().setName(table.getTableName());

    // If Tajo sets the table location here, the Thrift client throws:
    //   Caused by: MetaException(message:java.lang.NullPointerException)
    // To change a table path, you have to modify it from the Hive CLI instead.
    // sd.setLocation(tableDesc.getPath().toString());
    // Set column information.
    List<Column> columns = tableDesc.getSchema().getColumns();
    ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(columns.size());

    for (Column eachField : columns) {
      cols.add(new FieldSchema(eachField.getSimpleName(),
          HCatalogUtil.getHiveFieldType(eachField.getDataType().getType().name()), ""));
    }
    sd.setCols(cols);

    // Set partition keys for column-partitioned tables.
    if (tableDesc.hasPartition() && tableDesc.getPartitionMethod().getPartitionType().equals(PartitionType.COLUMN)) {
      List<FieldSchema> partitionKeys = new ArrayList<FieldSchema>();
      for (Column eachPartitionKey : tableDesc.getPartitionMethod().getExpressionSchema().getColumns()) {
        partitionKeys.add(new FieldSchema(eachPartitionKey.getSimpleName(),
            HCatalogUtil.getHiveFieldType(eachPartitionKey.getDataType().getType().name()), ""));
      }
      table.setPartitionKeys(partitionKeys);
    }
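    // Map the Tajo store type to the matching Hive input/output formats and serde options.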
    if (tableDesc.getMeta().getStoreType().equals(CatalogProtos.StoreType.RCFILE)) {
      String serde = tableDesc.getMeta().getOption(RCFILE_SERDE);
      sd.setInputFormat(org.apache.hadoop.hive.ql.io.RCFileInputFormat.class.getName());
      sd.setOutputFormat(org.apache.hadoop.hive.ql.io.RCFileOutputFormat.class.getName());

      if (RCFILE_TEXT_SERDE.equals(serde)) {
        sd.getSerdeInfo().setSerializationLib(org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe.class.getName());
      } else {
        sd.getSerdeInfo().setSerializationLib(
            org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe.class.getName());
      }

      if (tableDesc.getMeta().getOption(RCFILE_NULL) != null) {
        sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_NULL_FORMAT,
            StringEscapeUtils.unescapeJava(tableDesc.getMeta().getOption(RCFILE_NULL)));
      }
    } else if (tableDesc.getMeta().getStoreType().equals(CatalogProtos.StoreType.CSV)) {
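      // CSV tables are registered as plain Hive text tables.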
      sd.getSerdeInfo().setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
      sd.setInputFormat(org.apache.hadoop.mapred.TextInputFormat.class.getName());
      sd.setOutputFormat(org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat.class.getName());

      String fieldDelimiter = tableDesc.getMeta().getOption(CSVFILE_DELIMITER, CSVFILE_DELIMITER_DEFAULT);

      // Users can specify a unicode escape such as \u0001 or \001 as the field delimiter.
      // In that case, the Java console converts the value into the escaped literal "\\u0001",
      // and Hive un-escapes that value again, so the user gets the intended delimiter.
      // Therefore, we have to un-escape the value here as well.
      sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT,
          StringEscapeUtils.unescapeJava(fieldDelimiter));
      sd.getSerdeInfo().getParameters().put(serdeConstants.FIELD_DELIM,
          StringEscapeUtils.unescapeJava(fieldDelimiter));

      if (tableDesc.getMeta().getOption(CSVFILE_NULL) != null) {
        sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_NULL_FORMAT,
            StringEscapeUtils.unescapeJava(tableDesc.getMeta().getOption(CSVFILE_NULL)));
      }
    } else {
      throw new CatalogException(new NotImplementedException(tableDesc.getMeta().getStoreType().name()));
    }
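    // Tajo does not set Hive sort columns; register an empty list.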
    sd.setSortCols(new ArrayList<Order>());
    table.setSd(sd);
    client.getHiveClient().createTable(table);
  } catch (RuntimeException e) {
    throw e;
  } catch (Exception e) {
    throw new CatalogException(e);
  } finally {
    // Guard against an NPE: getClient() may have failed before client was assigned.
    if (client != null) {
      client.release();
    }
  }
}
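
The delimiter handling above can be checked in isolation with commons-lang's StringEscapeUtils, the same utility the method uses. A minimal sketch, assuming commons-lang 2.x on the classpath (the class name and sample value below are illustrative, not part of the source file):

import org.apache.commons.lang.StringEscapeUtils;

// Shows why the catalog un-escapes user-supplied delimiters before storing them.
public class DelimiterUnescapeDemo {
  public static void main(String[] args) {
    // A delimiter typed as \u0001 reaches the catalog as the six-character escaped literal.
    String raw = "\\u0001";
    String unescaped = StringEscapeUtils.unescapeJava(raw);
    // Prints 1: the single SOH control character Hive expects as FIELD_DELIM.
    System.out.println((int) unescaped.charAt(0));
  }
}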