in metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/util/PartitionFilterGenerator.java [92:190]
private String createSqlCriteria(final Object lhs, final Object rhs, final Compare comparison, final boolean not) {
    String key = null;
    Object value = null;
    boolean isKeyLhs = true;
    //
    // lhs, rhs or both can be keys
    //
    if (lhs instanceof String && isKey((String) lhs)) {
        key = lhs.toString();
        value = rhs;
    } else if (rhs instanceof String && isKey((String) rhs)) {
        key = rhs.toString();
        value = lhs;
        isKeyLhs = false;
    }
    if (key == null || value == null) {
        throw new RuntimeException("Invalid expression key/value " + lhs + "/" + rhs);
    }
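    // Look up the partition column for the key and build the (possibly negated) SQL operator.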
    final PartitionCol partCol = partitionColumns.get(key.toLowerCase());
    final String valueStr = value.toString();
    final String operator = not ? "not " + comparison.getExpression() : comparison.getExpression();
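    // Branches below: partition column vs. partition column, partition column vs. literal,
    // and the special keys "batchid" and "dateCreated".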
    if (partCol != null && valueStr != null && partitionColumns.containsKey(valueStr.toLowerCase())) {
        // Key part column
        partCol.occurred();
        final FilterType colType = partCol.type;
        optimized = false;
        // Value part column (looked up by lowercased name, matching the containsKey check above)
        final PartitionCol valuePartCol = partitionColumns.get(valueStr.toLowerCase());
        valuePartCol.occurred();
        final FilterType valueColType = valuePartCol.type;
        if (colType != valueColType) {
            throw new RuntimeException(
                String.format("Invalid column comparison with key as %s and"
                    + " value as %s", colType, valueColType));
        }
        return String.format("%s %s %s", getSQLExpression(partCol), operator, getSQLExpression(valuePartCol));
    } else if (partCol != null) {
        partCol.occurred();
        // Record key=value for a single equality occurrence of this partition column;
        // anything else disables the optimized path.
        if (partCol.hasOccurredOnlyOnce() && Compare.EQ.equals(comparison)) {
            partVals.set(partCol.index, key + "="
                + (escapePartitionNameOnFilter ? FileUtils.escapePathName(valueStr) : valueStr));
        } else {
            optimized = false;
        }
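        // Validate the column type and coerce string literals to it (date, timestamp, integral);
        // a remaining mismatch is an error.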
        final FilterType colType = partCol.type;
        if (colType == FilterType.Invalid) {
            throw new RuntimeException("Invalid type " + colType);
        }
        FilterType valType = FilterType.fromClass(value);
        if (valType == FilterType.Invalid) {
            throw new RuntimeException("Invalid value " + value.getClass());
        }
        if (colType == FilterType.Date && valType == FilterType.String) {
            try {
                value = new java.sql.Date(
                    HiveMetaStore.PARTITION_DATE_FORMAT.get().parse((String) value).getTime());
                valType = FilterType.Date;
            } catch (ParseException pe) { // do nothing, handled below - types will mismatch
            }
        } else if (colType == FilterType.Timestamp && valType == FilterType.String) {
            try {
                value = new java.sql.Timestamp(
                    new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse((String) value).getTime());
                valType = FilterType.Timestamp;
            } catch (ParseException pe) { // do nothing, handled below - types will mismatch
            }
        } else if (colType == FilterType.Integral && valType == FilterType.String) {
            try {
                value = new BigDecimal((String) value);
                valType = FilterType.Integral;
            } catch (NumberFormatException pe) { // do nothing, handled below - types will mismatch
            }
        } else if (colType == FilterType.String && valType != FilterType.String) {
            value = value.toString();
            valType = FilterType.String;
        }
        if (colType != valType) {
            throw new RuntimeException("Invalid value " + value.getClass());
        }
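        // Use the SQL expression for the partition column as the key and bind the coerced value as a query parameter.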
        key = getSQLExpression(partCol);
        params.add(value);
    } else if ("batchid".equalsIgnoreCase(key)) {
        // "batchid" is accepted but not filtered on; emit an always-true predicate.
        return "1=1";
    } else if ("dateCreated".equalsIgnoreCase(key)) {
        // "dateCreated" filters on the partition creation time rather than a partition column.
        optimized = false;
        key = "p.CREATE_TIME";
        params.add(value);
    } else {
        throw new RuntimeException("Invalid expression key " + key);
    }
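    // Place the bind-parameter placeholder on the side that held the literal in the original expression.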
    return isKeyLhs ? String.format("%s %s %s", key, operator, "?")
        : String.format("%s %s %s", "?", operator, key);
}