def convertStringToResultValueBasedOnKylinSQLType()

in src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/SparderTypeUtil.scala [124:149]


  def convertStringToResultValueBasedOnKylinSQLType(s: Any, dataTp: DataType): Any = {
    if (s == null) {
      null
    } else {
      // Dispatch on the Kylin SQL type name carried by dataTp.
      dataTp.getName match {
        // Exact decimal representation built from the string form.
        case "decimal" => new java.math.BigDecimal(s.toString)
        // Temporal types: parse the string to epoch millis, then rebuild the java.sql value.
        case "date" => new java.sql.Date(DateFormat.stringToMillis(s.toString))
        case "time" | "timestamp" | "datetime" =>
          val l = DateFormat.stringToMillis(s.toString)
          Timestamp.valueOf(DateFormat.castTimestampToString(l))
        case "tinyint" => s.toString.toByte
        case "smallint" => s.toString.toShort
        case "integer" => s.toString.toInt
        case "int4" => s.toString.toInt
        case "bigint" => s.toString.toLong
        case "long8" => s.toString.toLong
        case "float" => java.lang.Float.parseFloat(s.toString)
        case "double" => java.lang.Double.parseDouble(s.toString)
        case tp if tp.startsWith("varchar") => s.toString
        case tp if tp.startsWith("char") => s.toString
        case "boolean" => java.lang.Boolean.parseBoolean(s.toString)
        case "array<string>" => s.toString.split(COMMA)
        case noSupport => throw new IllegalArgumentException(s"Unsupported data type: $noSupport")
      }
    }
  }
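
A minimal usage sketch, assuming SparderTypeUtil is an object in package org.apache.spark.sql (per the file path above), that dataTp is Kylin's org.apache.kylin.metadata.datatype.DataType, and that its DataType.getType(String) factory is available; the variable names and literal values are illustrative only:

  import org.apache.kylin.metadata.datatype.DataType
  import org.apache.spark.sql.SparderTypeUtil

  // Coerce raw string cells from a query result into the JVM values Kylin expects.
  val bigintType: DataType = DataType.getType("bigint")
  val decimalType: DataType = DataType.getType("decimal")

  val id      = SparderTypeUtil.convertStringToResultValueBasedOnKylinSQLType("42", bigintType)    // Long: 42
  val price   = SparderTypeUtil.convertStringToResultValueBasedOnKylinSQLType("3.14", decimalType) // java.math.BigDecimal: 3.14
  val missing = SparderTypeUtil.convertStringToResultValueBasedOnKylinSQLType(null, bigintType)    // null passes through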