in phoenix5-hive/src/main/java/org/apache/phoenix/hive/util/TypeInfoUtils.java [424:546]
private TypeInfo parseType() {
  Token t = expect("type");

  // Is this a primitive type?
  PrimitiveTypeEntry typeEntry =
      PrimitiveObjectInspectorUtils.getTypeEntryFromTypeName(t.text);
  if (typeEntry != null && typeEntry.primitiveCategory != PrimitiveCategory.UNKNOWN) {
    String[] params = parseParams();
    switch (typeEntry.primitiveCategory) {
    case CHAR:
    case VARCHAR:
      if (params == null || params.length == 0) {
        throw new IllegalArgumentException(typeEntry.typeName
            + " type is specified without length: " + typeInfoString);
      }
      int length = 1;
      if (params.length == 1) {
        length = Integer.parseInt(params[0]);
        if (typeEntry.primitiveCategory == PrimitiveCategory.VARCHAR) {
          BaseCharUtils.validateVarcharParameter(length);
          return TypeInfoFactory.getVarcharTypeInfo(length);
        } else {
          BaseCharUtils.validateCharParameter(length);
          return TypeInfoFactory.getCharTypeInfo(length);
        }
      } else if (params.length > 1) {
        throw new IllegalArgumentException(
            "Type " + typeEntry.typeName + " only takes one parameter, but " +
            params.length + " are seen");
      }
      // Every possible params.length above either returns or throws, so execution
      // never actually falls through into the DECIMAL case.
    case DECIMAL:
      int precision = HiveDecimal.USER_DEFAULT_PRECISION;
      int scale = HiveDecimal.USER_DEFAULT_SCALE;
      if (params == null || params.length == 0) {
        // It's possible that old metadata still refers to "decimal" as a column type w/o
        // precision/scale. In this case, the default (10,0) is assumed. Thus, do nothing here.
      } else if (params.length == 2) {
        // New metadata always has two parameters.
        precision = Integer.parseInt(params[0]);
        scale = Integer.parseInt(params[1]);
        HiveDecimalUtils.validateParameter(precision, scale);
      } else if (params.length > 2) {
        throw new IllegalArgumentException("Type decimal only takes two parameters, but " +
            params.length + " are seen");
      }
      return TypeInfoFactory.getDecimalTypeInfo(precision, scale);
    default:
      return TypeInfoFactory.getPrimitiveTypeInfo(typeEntry.typeName);
    }
  }
  // Is this a list type?
  if (serdeConstants.LIST_TYPE_NAME.equals(t.text)) {
    expect("<");
    TypeInfo listElementType = parseType();
    expect(">");
    return TypeInfoFactory.getListTypeInfo(listElementType);
  }

  // Is this a map type?
  if (serdeConstants.MAP_TYPE_NAME.equals(t.text)) {
    expect("<");
    TypeInfo mapKeyType = parseType();
    expect(",");
    TypeInfo mapValueType = parseType();
    expect(">");
    return TypeInfoFactory.getMapTypeInfo(mapKeyType, mapValueType);
  }

  // Is this a struct type?
  if (serdeConstants.STRUCT_TYPE_NAME.equals(t.text)) {
    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<TypeInfo> fieldTypeInfos = new ArrayList<TypeInfo>();
    boolean first = true;
    do {
      if (first) {
        expect("<");
        first = false;
      } else {
        Token separator = expect(">", ",");
        if (separator.text.equals(">")) {
          // end of struct
          break;
        }
      }
      Token name = expect("name", ">");
      if (name.text.equals(">")) {
        break;
      }
      fieldNames.add(name.text);
      expect(":");
      fieldTypeInfos.add(parseType());
    } while (true);
    return TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypeInfos);
  }

  // Is this a union type?
  if (serdeConstants.UNION_TYPE_NAME.equals(t.text)) {
    List<TypeInfo> objectTypeInfos = new ArrayList<TypeInfo>();
    boolean first = true;
    do {
      if (first) {
        expect("<");
        first = false;
      } else {
        Token separator = expect(">", ",");
        if (separator.text.equals(">")) {
          // end of union
          break;
        }
      }
      objectTypeInfos.add(parseType());
    } while (true);
    return TypeInfoFactory.getUnionTypeInfo(objectTypeInfos);
  }

  throw new RuntimeException("Internal error parsing position "
      + t.position + " of '" + typeInfoString + "'");
}
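
For reference, upstream Hive exposes this same recursive parser through the public helper TypeInfoUtils.getTypeInfoFromTypeString(String) in org.apache.hadoop.hive.serde2.typeinfo. Assuming the phoenix5-hive copy keeps an equivalent entry point, the sketch below (written against the stock Hive classes; the ParseTypeDemo class name is purely illustrative, not code from the Phoenix sources) shows what parseType() produces for a nested type string:

import java.util.List;

import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class ParseTypeDemo {
  public static void main(String[] args) {
    // parseType() is recursive: each nested <...> triggers another call, so a
    // struct containing varchar, array, map, and decimal parses in one pass.
    TypeInfo info = TypeInfoUtils.getTypeInfoFromTypeString(
        "struct<name:varchar(20),scores:array<int>,attrs:map<string,decimal(10,2)>>");

    System.out.println(info.getCategory());  // STRUCT
    System.out.println(info.getTypeName());  // normalized type string

    // Walk the parsed struct fields and their TypeInfos.
    StructTypeInfo struct = (StructTypeInfo) info;
    List<String> fieldNames = struct.getAllStructFieldNames();
    List<TypeInfo> fieldTypes = struct.getAllStructFieldTypeInfos();
    for (int i = 0; i < fieldNames.size(); i++) {
      System.out.println(fieldNames.get(i) + " -> " + fieldTypes.get(i).getTypeName());
    }
  }
}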