in spark/sql-30/src/main/scala/org/elasticsearch/spark/sql/DataFrameValueWriter.scala [249:275]
/**
 * Serializes a single primitive value through the JSON generator according to
 * its Spark SQL data type.
 *
 * A `null` value is written as JSON null regardless of the declared type.
 * Decimal types are rejected outright (Elasticsearch has no decimal mapping);
 * the class-name comparison deliberately covers both the current and the old
 * catalyst package locations of `DecimalType`. Any other unrecognized type is
 * delegated to `handleUnknown`, whose result becomes this method's result.
 *
 * @param schema    the Spark SQL type describing `value`
 * @param value     the value to write; may be null
 * @param generator the JSON generator receiving the serialized form
 * @return SUCCESFUL for every handled type, otherwise whatever handleUnknown yields
 * @throws EsHadoopSerializationException for decimal-typed values
 */
private[spark] def writePrimitive(schema: DataType, value: Any, generator: Generator): Result = {
  // Discards the generator call's result and reports success, letting each
  // match arm stay a single expression (no early `return` needed).
  def done(ignored: Any): Result = Result.SUCCESFUL()

  if (value == null) {
    done(generator.writeNull())
  } else schema match {
    case BinaryType    => done(generator.writeBinary(value.asInstanceOf[Array[Byte]]))
    case BooleanType   => done(generator.writeBoolean(value.asInstanceOf[Boolean]))
    case ByteType      => done(generator.writeNumber(value.asInstanceOf[Byte]))
    case ShortType     => done(generator.writeNumber(value.asInstanceOf[Short]))
    case IntegerType   => done(generator.writeNumber(value.asInstanceOf[Int]))
    case LongType      => done(generator.writeNumber(value.asInstanceOf[Long]))
    case DoubleType    => done(generator.writeNumber(value.asInstanceOf[Double]))
    case FloatType     => done(generator.writeNumber(value.asInstanceOf[Float]))
    // Temporal types are serialized as epoch milliseconds.
    case TimestampType => done(generator.writeNumber(value.asInstanceOf[Timestamp].getTime()))
    case DateType      => done(generator.writeNumber(value.asInstanceOf[Date].getTime()))
    case StringType    => done(generator.writeString(value.toString))
    case unknown =>
      val typeName = unknown.getClass().getName()
      // Matched by class name so both the modern and the legacy catalyst
      // DecimalType are caught, whichever the running Spark version provides.
      if ("org.apache.spark.sql.types.DecimalType".equals(typeName) ||
          "org.apache.spark.sql.catalyst.types.DecimalType".equals(typeName)) {
        throw new EsHadoopSerializationException("Decimal types are not supported by Elasticsearch - consider using a different type (such as string)")
      }
      handleUnknown(value, generator)
  }
}