in spark/common/src/main/scala/org/apache/spark/sql/sedona_sql/io/geojson/GeoJSONJacksonGenerator.scala [87:174]
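// Builds a ValueWriter closure for the given Catalyst DataType. Nested types (structs, arrays
// and maps) recurse into makeWriter for their field/element/value types; `name` is the dotted
// path of the field being written and is extended for nested struct fields. Any type without
// a match falls through to the final case and raises an IllegalArgumentException.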
private def makeWriter(dataType: DataType, name: String): ValueWriter = dataType match {
case NullType =>
(row: SpecializedGetters, ordinal: Int) => gen.writeNull()
case BooleanType =>
(row: SpecializedGetters, ordinal: Int) => gen.writeBoolean(row.getBoolean(ordinal))
case ByteType =>
(row: SpecializedGetters, ordinal: Int) => gen.writeNumber(row.getByte(ordinal))
case ShortType =>
(row: SpecializedGetters, ordinal: Int) => gen.writeNumber(row.getShort(ordinal))
case IntegerType =>
(row: SpecializedGetters, ordinal: Int) => gen.writeNumber(row.getInt(ordinal))
case LongType =>
(row: SpecializedGetters, ordinal: Int) => gen.writeNumber(row.getLong(ordinal))
case FloatType =>
(row: SpecializedGetters, ordinal: Int) => gen.writeNumber(row.getFloat(ordinal))
case DoubleType =>
(row: SpecializedGetters, ordinal: Int) => gen.writeNumber(row.getDouble(ordinal))
case StringType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeString(row.getUTF8String(ordinal).toString)
case TimestampType =>
(row: SpecializedGetters, ordinal: Int) =>
val timestampString = timestampFormatter.format(row.getLong(ordinal))
gen.writeString(timestampString)
case DateType =>
(row: SpecializedGetters, ordinal: Int) =>
val dateString = dateFormatter.format(row.getInt(ordinal))
gen.writeString(dateString)
case CalendarIntervalType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeString(row.getInterval(ordinal).toString)
case BinaryType =>
(row: SpecializedGetters, ordinal: Int) => gen.writeBinary(row.getBinary(ordinal))
case dt: DecimalType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeNumber(row.getDecimal(ordinal, dt.precision, dt.scale).toJavaBigDecimal)
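// Nested structs become nested JSON objects: one writer is built per field, with the field
// name appended to the dotted path in `name`.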
case st: StructType =>
val fieldWriters = st.map(field => makeWriter(field.dataType, name + "." + field.name))
(row: SpecializedGetters, ordinal: Int) =>
writeObject(writeFields(name, row.getStruct(ordinal, st.length), st, fieldWriters))
case at: ArrayType =>
val elementWriter = makeWriter(at.elementType, name)
(row: SpecializedGetters, ordinal: Int) =>
writeArray(writeArrayData(row.getArray(ordinal), elementWriter))
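// Maps are written as JSON objects; map keys become the JSON field names, so only a writer
// for the value type is needed.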
case mt: MapType =>
val valueWriter = makeWriter(mt.valueType, name)
(row: SpecializedGetters, ordinal: Int) =>
writeObject(writeMapData(row.getMap(ordinal), mt, valueWriter))
case GeometryUDT =>
// We only write non-primary geometry columns here; they are serialized as WKT strings into the properties object.
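// For example, a secondary geometry column holding POINT (1 2) is emitted as the JSON string
// "POINT (1 2)" under its field name inside the "properties" object.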
(row: SpecializedGetters, ordinal: Int) => {
  val geom = GeometryUDT.deserialize(row.getBinary(ordinal))
  val wkt = wktWriter.write(geom)
  gen.writeString(wkt)
}
// For UDT values, the value has already been converted to the SQL type's corresponding value
// type, so we should never see an instance of the user-defined class here. For example,
// VectorUDT's SQL type is an array of doubles, so the value here is an ArrayData rather than
// a Vector.
case t: UserDefinedType[_] =>
makeWriter(t.sqlType, name)
case _ =>
(row: SpecializedGetters, ordinal: Int) =>
val v = row.get(ordinal, dataType)
throw new IllegalArgumentException(
s"Unsupported dataType: ${dataType.catalogString} " +
s"with value $v")
}
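// Illustrative sketch (hypothetical `schema` and `row`, with the surrounding class's `gen`
// assumed in scope): the writers built above are applied field-by-field when serializing a
// row, roughly like
//
//   val writers = schema.map(f => makeWriter(f.dataType, f.name))
//   schema.zipWithIndex.foreach { case (field, i) =>
//     if (!row.isNullAt(i)) {
//       gen.writeFieldName(field.name) // JSON key
//       writers(i)(row, i)             // JSON value via the matched writer
//     }
//   }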