def toUnsafeSqlType()

in connector/src/main/scala/org/apache/spark/sql/cassandra/CassandraSQLRow.scala [112:134]


  /**
   * Recursively converts a value read from Cassandra into a representation
   * the Spark SQL layer can consume: strings (WKT geometry, formatted
   * durations, host addresses, UUIDs, date ranges), `java.sql` date/time
   * types, boxed primitives, and Scala collections whose elements have been
   * converted the same way.
   *
   * Collections, [[UDTValue]]s and [[TupleValue]]s are rebuilt element by
   * element via recursive calls; any value not matched below is passed
   * through unchanged (boxed as `AnyRef`).
   */
  def toUnsafeSqlType(value: Any): AnyRef = value match {
    // Geometry values are rendered as their Well-Known Text form.
    case geom: Geometry => geom.asWellKnownText()
    // CQL durations keep the driver codec's canonical string form.
    case duration: CqlDuration => TypeCodecs.DURATION.format(duration)
    // NOTE(review): java.sql.Date/Time extend java.util.Date but throw
    // UnsupportedOperationException from toInstant — presumably only plain
    // java.util.Date instances reach this branch; confirm against callers.
    case utilDate: java.util.Date => java.sql.Timestamp.from(utilDate.toInstant)
    case day: java.time.LocalDate => java.sql.Date.valueOf(day)
    // Joda dates are flattened to their epoch-day count (boxed Long).
    case jodaDay: org.joda.time.LocalDate =>
      java.time.LocalDate
        .of(jodaDay.getYear, jodaDay.getMonthOfYear, jodaDay.getDayOfMonth)
        .toEpochDay
        .asInstanceOf[AnyRef]
    case address: InetAddress => address.getHostAddress
    case id: UUID => id.toString
    // Mapping before .toSeq deliberately keeps set semantics while the
    // elements are converted (duplicates after conversion collapse).
    case elements: Set[_] => elements.map(toUnsafeSqlType).toSeq
    case elements: List[_] => elements.map(toUnsafeSqlType)
    case entries: Map[_, _] =>
      entries.map { case (k, v) => (toUnsafeSqlType(k), toUnsafeSqlType(v)) }
    // UDTs and tuples are rebuilt with every column/field value converted.
    case udt: UDTValue => UDTValue(udt.metaData, udt.columnValues.map(toUnsafeSqlType))
    case tuple: TupleValue => TupleValue(tuple.values.map(toUnsafeSqlType): _*)
    case range: DateRange => range.toString
    // Type ascription boxes the primitive nanos-of-day into java.lang.Long.
    case timeOfDay: LocalTime => timeOfDay.toNanoOfDay: java.lang.Long
    case instant: Instant => java.sql.Timestamp.from(instant)
    // Anything else (numbers, strings, booleans, ...) passes through boxed.
    case other => other.asInstanceOf[AnyRef]
  }