in client-spark/spark-3-columnar-shuffle/src/main/scala/org/apache/spark/sql/execution/columnar/CelebornColumnBuilder.scala [334:369]
def apply(
    dataType: DataType,
    rowCnt: Int,
    columnName: String,
    encodingEnabled: Boolean,
    encoder: Encoder[_ <: AtomicType]): CelebornColumnBuilder = {
  // Pick a concrete builder for the column's data type. The Int, Long and
  // String builders additionally receive the compression encoder chosen by
  // the caller.
  val builder: CelebornColumnBuilder = dataType match {
    case ByteType => new CelebornByteColumnBuilder
    case BooleanType => new CelebornBooleanColumnBuilder
    case ShortType => new CelebornShortColumnBuilder
    case IntegerType =>
      val builder = new CelebornIntColumnBuilder
      builder.init(encoder.asInstanceOf[Encoder[IntegerType.type]])
      builder
    case LongType =>
      val builder = new CelebornLongColumnBuilder
      builder.init(encoder.asInstanceOf[Encoder[LongType.type]])
      builder
    case FloatType => new CelebornFloatColumnBuilder
    case DoubleType => new CelebornDoubleColumnBuilder
    case StringType =>
      val builder = new CelebornStringColumnBuilder
      builder.init(encoder.asInstanceOf[Encoder[StringType.type]])
      builder
    // Decimals: use the narrowest builder whose backing type fits the precision.
    case dt: DecimalType if dt.precision <= Decimal.MAX_INT_DIGITS =>
      new CelebornCompactMiniDecimalColumnBuilder(dt)
    case dt: DecimalType if dt.precision <= Decimal.MAX_LONG_DIGITS =>
      new CelebornCompactDecimalColumnBuilder(dt)
    case dt: DecimalType => new CelebornDecimalColumnBuilder(dt)
    case other =>
      throw new Exception(s"Unsupported type: $other")
  }
  // Size the builder's buffers for rowCnt rows before handing it back.
  builder.initialize(rowCnt, columnName, encodingEnabled)
  builder
}
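
A minimal call-site sketch (not part of this file) of how the companion-object factory might be used to build one column builder per schema field. The `pickEncoder` helper, the 4096-row batch size, and the example schema are assumptions for illustration only, and the sketch assumes the same Encoder/AtomicType imports used by the factory's signature; the real Celeborn caller selects encoders elsewhere.

import org.apache.spark.sql.types._

// Hypothetical helper: the real caller chooses an Encoder per column
// (dictionary, pass-through, ...); its construction is out of scope here.
def pickEncoder(dataType: DataType): Encoder[_ <: AtomicType] = ???

val schema = StructType(Seq(
  StructField("id", LongType),
  StructField("name", StringType),
  StructField("price", DecimalType(9, 2))))

// One builder per column, each pre-sized for an assumed 4096-row batch.
val builders = schema.fields.map { f =>
  CelebornColumnBuilder(
    dataType = f.dataType,
    rowCnt = 4096,
    columnName = f.name,
    encodingEnabled = true,
    encoder = pickEncoder(f.dataType))
}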