private def parseString()

in spark/common/src/main/scala/org/apache/sedona/sql/utils/Adapter.scala [352:386]
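
Converts a single attribute value, serialized as a string, into a value of the desired Spark SQL DataType; the literal string "null" gets a dedicated branch so Spark knows which class to use when serializing the missing value.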


  private def parseString(data: String, desiredType: DataType): Any = {
    // Spark needs to know how to serialize null values, so we have to provide the relevant class
    if (data == "null") {
      return desiredType match {
        case _: ByteType => null.asInstanceOf[Byte]
        case _: ShortType => null.asInstanceOf[Short]
        case _: IntegerType => null.asInstanceOf[Integer]
        case _: LongType => null.asInstanceOf[Long]
        case _: FloatType => null.asInstanceOf[Float]
        case _: DoubleType => null.asInstanceOf[Double]
        case _: DateType => null.asInstanceOf[Date]
        case _: TimestampType => null.asInstanceOf[Timestamp]
        case _: BooleanType => null.asInstanceOf[Boolean]
        case _: StringType => null.asInstanceOf[String]
        case _: BinaryType => null.asInstanceOf[Array[Byte]]
        case _: StructType => null.asInstanceOf[StructType]
      }
    }

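    // Non-null values: convert the string to the requested Spark SQL type.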
    desiredType match {
      case _: ByteType => data.toByte
      case _: ShortType => data.toShort
      case _: IntegerType => data.toInt
      case _: LongType => data.toLong
      case _: FloatType => data.toFloat
      case _: DoubleType => data.toDouble
      case _: DateType => Date.valueOf(data)
      case _: TimestampType => Timestamp.valueOf(data)
      case _: BooleanType => data.toBoolean
      case _: StringType => data
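      // Nested struct: parseStruct produces the field values, wrapped in a Row carrying the nested schema.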
      case _: StructType =>
        val desiredStructSchema = desiredType.asInstanceOf[StructType]
        new GenericRowWithSchema(parseStruct(data, desiredStructSchema), desiredStructSchema)
    }
  }
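
For context, here is a standalone sketch (not part of the Sedona source; ParseStringSketch, parseAttribute, and the sample schema are hypothetical names used only for illustration) of the same pattern-match-on-DataType technique: raw attribute strings are coerced field by field against a schema and packed into a typed Row.

  import java.sql.{Date, Timestamp}
  import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
  import org.apache.spark.sql.types._

  object ParseStringSketch {
    // Hypothetical helper mirroring the idea of parseString for a few types;
    // the real method above additionally maps the literal "null" per type.
    def parseAttribute(data: String, desiredType: DataType): Any =
      desiredType match {
        case _: IntegerType => data.toInt
        case _: DoubleType => data.toDouble
        case _: BooleanType => data.toBoolean
        case _: DateType => Date.valueOf(data)
        case _: TimestampType => Timestamp.valueOf(data)
        case _: StringType => data
      }

    def main(args: Array[String]): Unit = {
      val schema = StructType(Seq(
        StructField("id", IntegerType),
        StructField("area", DoubleType),
        StructField("updated", DateType)))

      // Raw attribute strings, e.g. as they might arrive as user data on a spatial RDD.
      val raw = Seq("42", "3.14", "2024-01-31")

      // Parse each string against the data type of the corresponding schema field.
      val values = raw.zip(schema.fields).map { case (s, f) => parseAttribute(s, f.dataType) }

      val row = new GenericRowWithSchema(values.toArray, schema)
      println(row) // prints [42,3.14,2024-01-31]
    }
  }

Matching on the DataType rather than inspecting the string keeps the schema authoritative: the same text ("42") can become a Byte, an Int, or a String depending on what the target StructField declares.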