// in spark-doris-connector/spark-doris-connector-base/src/main/scala/org/apache/spark/sql/execution/arrow/ArrowWriter.scala [48:80]
/**
 * Builds the [[ArrowFieldWriter]] that knows how to populate `vector`.
 *
 * Dispatches on the pair of the Catalyst type recovered from the Arrow field
 * metadata and the concrete Arrow vector class, recursing into child vectors
 * for the nested types (array, map, struct).
 *
 * @param vector the Arrow vector to be written
 * @return a writer matching the vector's Spark SQL type
 * @throws UnsupportedOperationException for type/vector pairs with no writer
 */
private def createFieldWriter(vector: ValueVector): ArrowFieldWriter = {
  val sparkType = ArrowUtils.fromArrowField(vector.getField())
  (sparkType, vector) match {
    case (BooleanType, bits: BitVector) => new BooleanWriter(bits)
    case (ByteType, bytes: TinyIntVector) => new ByteWriter(bytes)
    case (ShortType, shorts: SmallIntVector) => new ShortWriter(shorts)
    case (IntegerType, ints: IntVector) => new IntegerWriter(ints)
    case (LongType, longs: BigIntVector) => new LongWriter(longs)
    case (FloatType, floats: Float4Vector) => new FloatWriter(floats)
    case (DoubleType, doubles: Float8Vector) => new DoubleWriter(doubles)
    case (DecimalType.Fixed(precision, scale), decimals: DecimalVector) =>
      new DecimalWriter(decimals, precision, scale)
    case (StringType, strings: VarCharVector) => new StringWriter(strings)
    case (BinaryType, blobs: VarBinaryVector) => new BinaryWriter(blobs)
    case (DateType, days: DateDayVector) => new DateWriter(days)
    case (TimestampType, micros: TimeStampMicroTZVector) => new TimestampWriter(micros)
    case (ArrayType(_, _), list: ListVector) =>
      // Recurse into the element vector so nested element types reuse the same dispatch.
      new ArrayWriter(list, createFieldWriter(list.getDataVector()))
    case (MapType(_, _, _), map: MapVector) =>
      // An Arrow map's data vector is a struct of (key, value) entries.
      val entries = map.getDataVector.asInstanceOf[StructVector]
      new MapWriter(
        map,
        entries,
        createFieldWriter(entries.getChild(MapVector.KEY_NAME)),
        createFieldWriter(entries.getChild(MapVector.VALUE_NAME)))
    case (StructType(_), struct: StructVector) =>
      // One child writer per struct field, in ordinal order.
      val fieldWriters = Array.tabulate(struct.size()) { ordinal =>
        createFieldWriter(struct.getChildByOrdinal(ordinal))
      }
      new StructWriter(struct, fieldWriters)
    case (dt, _) =>
      throw new UnsupportedOperationException(s"Unsupported data type: ${dt.catalogString}")
  }
}