in connectors/spark-iotdb-connector/src/main/scala/org/apache/iotdb/spark/db/Converter.scala [50:88]
/**
 * Derives the Spark schema ([[StructType]]) for the result of `options.sql` by
 * executing the statement against IoTDB over JDBC and inspecting the result-set
 * metadata.
 *
 * A non-nullable `Time` (timestamp) column of `LongType` is prepended when the
 * IoTDB result set carries timestamps; all data columns are mapped from their
 * JDBC type to the corresponding Spark type and marked nullable.
 *
 * Fix over the previous version: the JDBC `Connection` and `Statement` were
 * never closed (leaked on every call, including on the unsupported-type throw
 * path); they are now released in `finally` blocks. Closing the statement also
 * closes its open `ResultSet` per the JDBC contract.
 *
 * @param options connection info (url/user/password) and the SQL to describe
 * @return the Spark schema of the query result; empty when the statement
 *         produced no result set
 * @throws UnsupportedOperationException for JDBC column types with no Spark mapping
 */
def toSparkSchema(options: IoTDBOptions): StructType = {
  Class.forName("org.apache.iotdb.jdbc.IoTDBDriver")
  val sqlConn: Connection = DriverManager.getConnection(options.url, options.user, options.password)
  try {
    val sqlStatement: Statement = sqlConn.createStatement()
    try {
      val hasResultSet: Boolean = sqlStatement.execute(options.sql)
      val fields = new ListBuffer[StructField]()
      if (hasResultSet) {
        val resultSet: ResultSet = sqlStatement.getResultSet
        val resultSetMetaData: ResultSetMetaData = resultSet.getMetaData
        // Only prepend the timestamp field when the IoTDB result set carries timestamps.
        val printTimestamp = !resultSet.asInstanceOf[IoTDBJDBCResultSet].isIgnoreTimeStamp
        if (printTimestamp) {
          fields += StructField(SQLConstant.TIMESTAMP_STR, LongType, nullable = false)
        }
        val colCount = resultSetMetaData.getColumnCount
        // Skip the leading "Time" column (JDBC index 1) when present: it is already
        // represented by the dedicated timestamp field added above.
        val startIndex = if ("Time".equals(resultSetMetaData.getColumnName(1))) 2 else 1
        for (i <- startIndex to colCount) {
          fields += StructField(resultSetMetaData.getColumnLabel(i), resultSetMetaData.getColumnType(i) match {
            case Types.BOOLEAN => BooleanType
            case Types.INTEGER => IntegerType
            case Types.BIGINT => LongType
            case Types.FLOAT => FloatType
            case Types.DOUBLE => DoubleType
            case Types.VARCHAR => StringType
            case other => throw new UnsupportedOperationException(s"Unsupported type $other")
          }, nullable = true)
        }
      }
      // Single exit point; an empty buffer yields an empty schema when there was no result set.
      StructType(fields.toList)
    } finally {
      // Closing the Statement also closes its current ResultSet (JDBC spec).
      sqlStatement.close()
    }
  } finally {
    sqlConn.close()
  }
}