in odps-sqoop/src/java/org/apache/sqoop/hive/TableDefWriter.java [116:224]
/**
 * Builds the HiveQL {@code CREATE TABLE} statement for the import target.
 *
 * <p>Column types come from {@code externalColTypes} when pre-defined, else
 * from the connection manager (by table name or by free-form query). Columns
 * explicitly mapped via {@code --map-column-hive} must exist in the result
 * set; the partition key must NOT be an imported column. Field/line
 * delimiters are emitted as Hive octal escape codes, and an LZOP codec
 * switches the storage clause to the DeprecatedLzoTextInputFormat pair.
 *
 * @return the complete CREATE TABLE statement text
 * @throws IOException if a column's SQL type has no Hive equivalent
 * @throws IllegalArgumentException if a user-mapped column is missing from
 *     the result set, or the partition key collides with an import column
 */
public String getCreateTableStmt() throws IOException {
  Map<String, Integer> columnTypes;
  Properties userMapping = options.getMapColumnHive();
  if (externalColTypes != null) {
    // Use pre-defined column types.
    columnTypes = externalColTypes;
  } else {
    // Get these from the database.
    if (null != inputTableName) {
      columnTypes = connManager.getColumnTypes(inputTableName);
    } else {
      columnTypes = connManager.getColumnTypesForQuery(options.getSqlQuery());
    }
  }
  String [] colNames = getColumnNames();
  StringBuilder sb = new StringBuilder();
  if (options.doFailIfHiveTableExists()) {
    sb.append("CREATE TABLE `");
  } else {
    sb.append("CREATE TABLE IF NOT EXISTS `");
  }
  if (options.getHiveDatabaseName() != null) {
    sb.append(options.getHiveDatabaseName()).append("`.`");
  }
  sb.append(outputTableName).append("` ( ");
  // Check that all explicitly mapped columns are present in the result set.
  for (Object column : userMapping.keySet()) {
    boolean found = false;
    for (String c : colNames) {
      if (c.equals(column)) {
        found = true;
        break;
      }
    }
    if (!found) {
      // BUGFIX: the second literal previously lacked a leading space,
      // yielding messages like "...name foofound while importing data".
      throw new IllegalArgumentException("No column by the name " + column
          + " found while importing data");
    }
  }
  boolean first = true;
  String partitionKey = options.getHivePartitionKey();
  for (String col : colNames) {
    // The partition key is a virtual Hive column; it must not also be
    // declared as a regular imported column.
    if (col.equals(partitionKey)) {
      throw new IllegalArgumentException("Partition key " + col + " cannot "
          + "be a column to import.");
    }
    if (!first) {
      sb.append(", ");
    }
    first = false;
    Integer colType = columnTypes.get(col);
    // User-supplied --map-column-hive mapping wins over the inferred type.
    String hiveColType = userMapping.getProperty(col);
    if (hiveColType == null) {
      hiveColType = connManager.toHiveType(inputTableName, col, colType);
    }
    if (null == hiveColType) {
      throw new IOException("Hive does not support the SQL type for column "
          + col);
    }
    sb.append('`').append(col).append("` ").append(hiveColType);
    if (HiveTypes.isHiveTypeImprovised(colType)) {
      LOG.warn(
          "Column " + col + " had to be cast to a less precise type in Hive");
    }
  }
  sb.append(") ");
  if (commentsEnabled) {
    // SimpleDateFormat is not thread-safe, but this instance is local.
    DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
    String curDateStr = dateFormat.format(new Date());
    sb.append("COMMENT 'Imported by sqoop on " + curDateStr + "' ");
  }
  if (partitionKey != null) {
    sb.append("PARTITIONED BY (")
        .append(partitionKey)
        .append(" STRING) ");
  }
  // Delimiters are rendered as Hive octal escapes (e.g. '\001').
  sb.append("ROW FORMAT DELIMITED FIELDS TERMINATED BY '");
  sb.append(getHiveOctalCharCode((int) options.getOutputFieldDelim()));
  sb.append("' LINES TERMINATED BY '");
  sb.append(getHiveOctalCharCode((int) options.getOutputRecordDelim()));
  String codec = options.getCompressionCodec();
  if (codec != null && (codec.equals(CodecMap.LZOP)
      || codec.equals(CodecMap.getCodecClassName(CodecMap.LZOP)))) {
    // LZOP-compressed text needs the indexed LZO input format so Hive can
    // split the files; plain TEXTFILE would read them as opaque blobs.
    sb.append("' STORED AS INPUTFORMAT "
        + "'com.hadoop.mapred.DeprecatedLzoTextInputFormat'");
    sb.append(" OUTPUTFORMAT "
        + "'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'");
  } else {
    sb.append("' STORED AS TEXTFILE");
  }
  LOG.debug("Create statement: " + sb.toString());
  return sb.toString();
}