in modules/spark-ext/spark/src/main/scala/org/apache/ignite/spark/impl/QueryUtils.scala [122:164]
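/** Maps the Spark SQL data type of a [[StructField]] to the name of the corresponding Ignite SQL type. */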
private def dataType(field: StructField): String = field.dataType match {
    case BooleanType ⇒
        "BOOLEAN"
    case ByteType ⇒
        "TINYINT"
    case ShortType ⇒
        "SMALLINT"
    case IntegerType ⇒
        "INT"
    case LongType ⇒
        "BIGINT"
    case FloatType ⇒
        "FLOAT"
    case DoubleType ⇒
        "DOUBLE"
    // For now Ignite doesn't provide correct information about a DECIMAL column's precision and scale.
    // All we get is the default precision and scale (10, 0).
    // Just replace them with some "common sense" values.
    case decimal: DecimalType if decimal.precision == 10 && decimal.scale == 0 ⇒
        "DECIMAL(10, 5)"
    case decimal: DecimalType ⇒
        s"DECIMAL(${decimal.precision}, ${decimal.scale})"
    case StringType ⇒
        "VARCHAR"
    case DateType ⇒
        "DATE"
    case TimestampType ⇒
        "TIMESTAMP"
    case _ ⇒
        throw new IgniteException(s"Unsupported data type ${field.dataType}")
}
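
For illustration, here is a minimal, self-contained sketch of how this kind of mapping could be used to render the column clause of an Ignite CREATE TABLE statement from a Spark schema. The `toIgniteSqlType` helper, the `IgniteTypeMappingSketch` object, and the `person` schema below are hypothetical and not part of the Ignite API; the helper simply mirrors the private `dataType` method above (omitting the DECIMAL(10, 0) special case for brevity) and throws IllegalArgumentException instead of IgniteException to stay dependency-free.

import org.apache.spark.sql.types._

object IgniteTypeMappingSketch {
    /** Hypothetical standalone mirror of QueryUtils.dataType, shown only for illustration. */
    private def toIgniteSqlType(field: StructField): String = field.dataType match {
        case BooleanType ⇒ "BOOLEAN"
        case ByteType ⇒ "TINYINT"
        case ShortType ⇒ "SMALLINT"
        case IntegerType ⇒ "INT"
        case LongType ⇒ "BIGINT"
        case FloatType ⇒ "FLOAT"
        case DoubleType ⇒ "DOUBLE"
        case d: DecimalType ⇒ s"DECIMAL(${d.precision}, ${d.scale})"
        case StringType ⇒ "VARCHAR"
        case DateType ⇒ "DATE"
        case TimestampType ⇒ "TIMESTAMP"
        case other ⇒ throw new IllegalArgumentException(s"Unsupported data type $other")
    }

    def main(args: Array[String]): Unit = {
        // A hypothetical Spark schema for a "person" table.
        val schema = StructType(Seq(
            StructField("id", LongType, nullable = false),
            StructField("name", StringType),
            StructField("salary", DecimalType(12, 2)),
            StructField("hired", DateType)))

        // Render "<name> <IGNITE_TYPE>" pairs and join them into a column clause.
        val columns = schema.fields
            .map(f ⇒ s"${f.name} ${toIgniteSqlType(f)}")
            .mkString(", ")

        println(s"CREATE TABLE person ($columns)")
        // Prints: CREATE TABLE person (id BIGINT, name VARCHAR, salary DECIMAL(12, 2), hired DATE)
    }
}

Keeping the whole translation in a single pattern match means an unsupported Spark type fails fast when the DDL is generated, rather than surfacing later as a malformed CREATE TABLE or insert error.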