in core/src/main/java/com/alibaba/druid/sql/dialect/hive/parser/HiveCreateTableParser.java [127:355]
/**
 * Parses the optional trailing clauses of a Hive CREATE TABLE statement
 * (everything after the column list), populating {@code createTable}.
 * Clauses are tried in a fixed order; each branch only consumes tokens
 * when its leading keyword matches, so absent clauses are skipped.
 * Handled here: ENGINE/CHARSET (skipped), USING, OPTIONS, COMMENT,
 * MAPPED BY, PARTITIONED BY, CLUSTERED BY ... INTO n BUCKETS, SORTED BY,
 * SKEWED BY [STORED AS DIRECTORIES], ROW FORMAT, STORED BY/AS, LOCATION,
 * LIKE, TBLPROPERTIES, META LIFECYCLE, then the AS-query, then a second
 * round of LIKE/COMMENT/USING/TBLPROPERTIES that may appear after the query.
 *
 * @param createTable the statement being built; must be a
 *                    {@link HiveCreateTableStatement} (cast below)
 */
protected void parseCreateTableRest(SQLCreateTableStatement createTable) {
HiveCreateTableStatement stmt = (HiveCreateTableStatement) createTable;
// ENGINE = xxx : consumed and discarded — the value is not stored on stmt.
if (lexer.nextIfIdentifier(FnvHash.Constants.ENGINE)) {
// skip engine=xxx
accept(Token.EQ);
lexer.nextToken();
}
// CHARSET = xxx : likewise consumed and discarded.
if (lexer.nextIfIdentifier(FnvHash.Constants.CHARSET)) {
// skip charset = xxx
accept(Token.EQ);
lexer.nextToken();
}
// USING <expr> — USING may arrive as a keyword token or a plain identifier
// depending on the lexer configuration, hence the double check.
if (lexer.identifierEquals(FnvHash.Constants.USING) || lexer.token() == Token.USING) {
lexer.nextToken();
SQLExpr using = this.exprParser.expr();
stmt.setUsing(using);
}
// OPTIONS ( k = v, ... ) — stored in the same map as TBLPROPERTIES.
if (lexer.nextIfIdentifier(FnvHash.Constants.OPTIONS)) {
accept(Token.LPAREN);
parseAssignItems(stmt.getTableOptions(), stmt, false);
accept(Token.RPAREN);
}
// COMMENT <expr> (pre-query position; see the second COMMENT branch below).
if (lexer.nextIf(Token.COMMENT)) {
SQLExpr comment = this.exprParser.expr();
stmt.setComment(comment);
}
// MAPPED BY ( ... )
if (lexer.nextIfIdentifier(FnvHash.Constants.MAPPED)) {
accept(Token.BY);
this.exprParser.parseAssignItem(stmt.getMappedBy(), stmt);
}
// PARTITIONED BY ( col type [COMMENT ...], ... ) — each partition column is
// a full column definition; trailing lexer comments are attached to the
// column they follow so comment-preserving output round-trips.
if (lexer.nextIf(Token.PARTITIONED)) {
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
if (lexer.token() != Token.IDENTIFIER) {
throw new ParserException("expect identifier. " + lexer.info());
}
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.addPartitionColumn(column);
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
if (lexer.token() != Token.COMMA) {
break;
} else {
lexer.nextToken();
// Comments appearing after the comma still belong to the previous column.
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
}
}
accept(Token.RPAREN);
}
// CLUSTERED BY ( col [ASC|DESC], ... )
if (lexer.nextIfIdentifier(FnvHash.Constants.CLUSTERED)) {
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
stmt.addClusteredByItem(item);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
// SORTED BY ( ... ) — keyword check only; parseSortedBy consumes the tokens.
if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
parseSortedBy(stmt);
}
// A CLUSTERED BY or SORTED BY clause makes INTO <n> BUCKETS mandatory.
if (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) {
accept(Token.INTO);
if (lexer.token() == Token.LITERAL_INT) {
stmt.setBuckets(lexer.integerValue().intValue());
lexer.nextToken();
} else {
throw new ParserException("into buckets must be integer. " + lexer.info());
}
acceptIdentifier("BUCKETS");
}
// SKEWED BY ( cols ) ON ( values ) [STORED AS DIRECTORIES] — each ON entry
// is either a parenthesized value tuple (SQLListExpr) or a single expression.
if (lexer.nextIfIdentifier(FnvHash.Constants.SKEWED)) {
accept(Token.BY);
accept(Token.LPAREN);
this.exprParser.exprList(stmt.getSkewedBy(), stmt);
accept(Token.RPAREN);
accept(Token.ON);
accept(Token.LPAREN);
for (; ; ) {
if (lexer.token() == Token.LPAREN) {
SQLListExpr list = new SQLListExpr();
lexer.nextToken();
this.exprParser.exprList(list.getItems(), list);
accept(Token.RPAREN);
stmt.addSkewedByOn(list);
} else {
SQLExpr expr = this.exprParser.expr();
stmt.addSkewedByOn(expr);
}
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
if (lexer.nextIfIdentifier(FnvHash.Constants.STORED)) {
accept(Token.AS);
acceptIdentifier("DIRECTORIES");
stmt.setSkewedByStoreAsDirectories(true);
}
}
// ROW FORMAT ... — ROW may be a keyword token or identifier; delegated.
if (lexer.token() == Token.ROW
|| lexer.identifierEquals(FnvHash.Constants.ROW)) {
parseRowFormat(stmt);
}
// NOTE(review): a bare '[' here is recorded and skipped, apparently to
// tolerate bracketed STORED clauses in some input — confirm which dialect
// produces this; see the matching ']' handling after the STORED branch.
if (Token.LBRACKET.equals(lexer.token())) {
stmt.setLbracketUse(true);
lexer.nextToken();
}
// STORED BY <handler> [WITH SERDEPROPERTIES ...]  -or-
// STORED AS INPUTFORMAT <x> [OUTPUTFORMAT <y>]    -or-
// STORED AS <format-name>
if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
lexer.nextToken();
if (lexer.token() == Token.BY) {
accept(Token.BY);
SQLName name = this.exprParser.name();
stmt.setStoredBy(name);
parseCreateTableWithSerderPropertie(stmt);
} else {
accept(Token.AS);
if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
HiveInputOutputFormat format = new HiveInputOutputFormat();
lexer.nextToken();
format.setInput(this.exprParser.primary());
// OUTPUTFORMAT is optional; only INPUT is guaranteed to be set.
if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
lexer.nextToken();
format.setOutput(this.exprParser.primary());
}
stmt.setStoredAs(format);
} else {
SQLName name = this.exprParser.name();
stmt.setStoredAs(name);
}
}
}
// Closing ']' paired with the '[' tolerated above.
if (Token.RBRACKET.equals(lexer.token())) {
stmt.setRbracketUse(true);
lexer.nextToken();
}
// LOCATION '<path>'
if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
lexer.nextToken();
SQLExpr location = this.exprParser.primary();
stmt.setLocation(location);
}
// LIKE <table> (pre-query position) — delegated.
if (lexer.token() == Token.LIKE) {
parseLike(stmt);
}
// TBLPROPERTIES ( ... ) — delegated; a second inline TBLPROPERTIES branch
// after the query handles the post-query position.
if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
parseOptions(stmt);
}
// META LIFECYCLE <n> — presumably an ODPS/MaxCompute lifecycle clause; the
// value is parsed as a primary expression.
if (lexer.identifierEquals(FnvHash.Constants.META)) {
lexer.nextToken();
acceptIdentifier("LIFECYCLE");
stmt.setLifeCycle(this.exprParser.primary());
}
// Parses a trailing AS <query>, if present (defined outside this chunk).
createTableQuery(stmt);
// LIKE after the query: either LIKE SELECT ... (flagged as a like-query),
// LIKE MAPPING(...) parsed as an expression table source, or a plain name.
// The mark/reset lets us peek at the token after LIKE without committing.
if (lexer.token() == Token.LIKE) {
lexer.nextToken();
Lexer.SavePoint mark = lexer.mark();
if (lexer.token() == Token.SELECT) {
stmt.setLikeQuery(true);
SQLSelect select = this.createSQLSelectParser().select();
stmt.setSelect(select);
} else {
lexer.reset(mark);
if (lexer.identifierEquals(FnvHash.Constants.MAPPING)) {
SQLExpr like = this.exprParser.primary();
stmt.setLike(new SQLExprTableSource(like));
} else {
SQLName name = this.exprParser.name();
stmt.setLike(name);
}
}
}
// COMMENT after the query — overwrites any pre-query comment if both occur.
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
SQLExpr comment = this.exprParser.expr();
stmt.setComment(comment);
}
// USING after the query — same double keyword/identifier check as above;
// overwrites any pre-query USING if both occur.
if (lexer.identifierEquals(FnvHash.Constants.USING) || lexer.token() == Token.USING) {
lexer.nextToken();
SQLExpr using = this.exprParser.expr();
stmt.setUsing(using);
}
// TBLPROPERTIES after the query — merged into the same options map.
if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
lexer.nextToken();
accept(Token.LPAREN);
parseAssignItems(stmt.getTableOptions(), stmt, false);
accept(Token.RPAREN);
}
}