in metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/HiveConnectorTableService.java [163:266]
void updateTable(
    final ConnectorRequestContext requestContext,
    final Table table,
    final TableInfo tableInfo
) throws MetaException {
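    // ensure the table has a mutable, non-null parameters map before writing into it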
    if (table.getParameters() == null || table.getParameters().isEmpty()) {
        table.setParameters(Maps.newHashMap());
    }
    // if this is not a virtual view, mark the table as external;
    // otherwise keep it as a VIRTUAL_VIEW and validate it
    if (!isVirtualView(table)) {
        table.getParameters().putIfAbsent(PARAMETER_EXTERNAL, "TRUE");
    } else {
        validAndUpdateVirtualView(table);
    }
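    // merge caller-supplied metadata into the table parameters, overwriting existing keys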
    if (tableInfo.getMetadata() != null) {
        table.getParameters().putAll(tableInfo.getMetadata());
    }
    // no other information is needed for an iceberg table
    if (connectorContext.getConfig().isIcebergEnabled() && HiveTableUtil.isIcebergTable(tableInfo)) {
        table.setPartitionKeys(Collections.emptyList());
        log.debug("Skipping serde and setting partition keys to empty when updating iceberg table in hive");
        return;
    }
    // storage: reuse the existing storage descriptor, if any, so unspecified fields are preserved
    final StorageDescriptor sd = table.getSd() != null ? table.getSd() : new StorageDescriptor();
    String inputFormat = null;
    String outputFormat = null;
    Map<String, String> sdParameters = Maps.newHashMap();
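    // resolve the table location: prefer the URI from the request, then the existing
    // location, and finally fall back to <database location>/<table name>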
    final String location =
        tableInfo.getSerde() == null ? null : tableInfo.getSerde().getUri();
    if (location != null) {
        sd.setLocation(location);
    } else if (sd.getLocation() == null) {
        final String locationStr = hiveConnectorDatabaseService.get(requestContext,
            QualifiedName.ofDatabase(tableInfo.getName().getCatalogName(),
                tableInfo.getName().getDatabaseName())).getUri();
        final Path databasePath = new Path(locationStr);
        final Path targetPath = new Path(databasePath, tableInfo.getName().getTableName());
        sd.setLocation(targetPath.toString());
    }
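    // ensure a SerDeInfo exists and name it after the table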
    if (sd.getSerdeInfo() == null) {
        sd.setSerdeInfo(new SerDeInfo());
    }
    final SerDeInfo serdeInfo = sd.getSerdeInfo();
    serdeInfo.setName(tableInfo.getName().getTableName());
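    // copy serde and format details from the request when provided; otherwise derive
    // them from the table's existing storage format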
    final StorageInfo storageInfo = tableInfo.getSerde();
    if (storageInfo != null) {
        if (!Strings.isNullOrEmpty(storageInfo.getSerializationLib())) {
            serdeInfo.setSerializationLib(storageInfo.getSerializationLib());
        }
        if (storageInfo.getSerdeInfoParameters() != null && !storageInfo.getSerdeInfoParameters().isEmpty()) {
            serdeInfo.setParameters(storageInfo.getSerdeInfoParameters());
        }
        inputFormat = storageInfo.getInputFormat();
        outputFormat = storageInfo.getOutputFormat();
        if (storageInfo.getParameters() != null && !storageInfo.getParameters().isEmpty()) {
            sdParameters = storageInfo.getParameters();
        }
    } else if (table.getSd() != null) {
        final HiveStorageFormat hiveStorageFormat = this.extractHiveStorageFormat(table);
        serdeInfo.setSerializationLib(hiveStorageFormat.getSerde());
        serdeInfo.setParameters(ImmutableMap.of());
        inputFormat = hiveStorageFormat.getInputFormat();
        outputFormat = hiveStorageFormat.getOutputFormat();
    }
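    // split the requested fields into data columns and partition keys, preserving order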
    final ImmutableList.Builder<FieldSchema> columnsBuilder = ImmutableList.builder();
    final ImmutableList.Builder<FieldSchema> partitionKeysBuilder = ImmutableList.builder();
    if (tableInfo.getFields() != null) {
        for (FieldInfo column : tableInfo.getFields()) {
            final FieldSchema field = hiveMetacatConverters.metacatToHiveField(column);
            if (column.isPartitionKey()) {
                partitionKeysBuilder.add(field);
            } else {
                columnsBuilder.add(field);
            }
        }
    }
    final ImmutableList<FieldSchema> columns = columnsBuilder.build();
    if (!columns.isEmpty()) {
        sd.setCols(columns);
    }
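    // only overwrite formats and parameters when new values were supplied, so
    // existing settings survive a partial update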
    if (!Strings.isNullOrEmpty(inputFormat)) {
        sd.setInputFormat(inputFormat);
    }
    if (!Strings.isNullOrEmpty(outputFormat)) {
        sd.setOutputFormat(outputFormat);
    }
    if (sd.getParameters() == null) {
        sd.setParameters(sdParameters);
    }
    // partition keys
    final ImmutableList<FieldSchema> partitionKeys = partitionKeysBuilder.build();
    if (!partitionKeys.isEmpty()) {
        table.setPartitionKeys(partitionKeys);
    }
    table.setSd(sd);
}
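
// A minimal caller sketch (hypothetical; the converter and client calls below are
// assumptions for illustration, not part of this excerpt). A create/update flow maps
// the Metacat TableInfo to a Thrift Table, lets updateTable fill in storage details,
// and then persists the result:
//
//   final Table table = hiveMetacatConverters.fromTableInfo(tableInfo);
//   updateTable(requestContext, table, tableInfo);
//   metacatHiveClient.alterTable(dbName, tableName, table); // hypothetical persist step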