in spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/NaiveEncoder.scala [210:264]
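/**
 * Encodes a Spark SQL value into a type-tagged byte array: byte 0 holds the
 * encoder's type tag (e.g. IntEnc, StringEnc) and the remaining bytes hold the
 * value serialized with HBase's Bytes utility.
 */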
override def encode(dt: DataType, value: Any): Array[Byte] = {
  dt match {
    case BooleanType =>
      // Booleans are stored as a single sentinel byte after the tag:
      // -1 (0xFF) for true, 0 for false.
      val result = new Array[Byte](Bytes.SIZEOF_BOOLEAN + 1)
      result(0) = BooleanEnc
      result(1) = (if (value.asInstanceOf[Boolean]) -1 else 0).toByte
      result
    case ShortType =>
      val result = new Array[Byte](Bytes.SIZEOF_SHORT + 1)
      result(0) = ShortEnc
      Bytes.putShort(result, 1, value.asInstanceOf[Short])
      result
    case IntegerType =>
      val result = new Array[Byte](Bytes.SIZEOF_INT + 1)
      result(0) = IntEnc
      Bytes.putInt(result, 1, value.asInstanceOf[Int])
      result
    case LongType | TimestampType =>
      // Timestamps are carried as their underlying long value,
      // so both types share the 8-byte long encoding.
      val result = new Array[Byte](Bytes.SIZEOF_LONG + 1)
      result(0) = LongEnc
      Bytes.putLong(result, 1, value.asInstanceOf[Long])
      result
    case FloatType =>
      val result = new Array[Byte](Bytes.SIZEOF_FLOAT + 1)
      result(0) = FloatEnc
      Bytes.putFloat(result, 1, value.asInstanceOf[Float])
      result
    case DoubleType =>
      val result = new Array[Byte](Bytes.SIZEOF_DOUBLE + 1)
      result(0) = DoubleEnc
      Bytes.putDouble(result, 1, value.asInstanceOf[Double])
      result
    case BinaryType =>
      // Binary values are variable-length: copy the raw bytes verbatim after the tag.
      val v = value.asInstanceOf[Array[Byte]]
      val result = new Array[Byte](v.length + 1)
      result(0) = BinaryEnc
      System.arraycopy(v, 0, result, 1, v.length)
      result
    case StringType =>
      val bytes = Bytes.toBytes(value.asInstanceOf[String])
      val result = new Array[Byte](bytes.length + 1)
      result(0) = StringEnc
      System.arraycopy(bytes, 0, result, 1, bytes.length)
      result
    case _ =>
      // Fallback: encode any unsupported type as the UTF-8 bytes of its toString.
      val bytes = Bytes.toBytes(value.toString)
      val result = new Array[Byte](bytes.length + 1)
      result(0) = UnknownEnc
      System.arraycopy(bytes, 0, result, 1, bytes.length)
      result
  }
}
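
For reference, a minimal sketch of a round trip through this layout, assuming an
in-scope NaiveEncoder instance (the variable name encoder and its instantiation
below are hypothetical) and the real org.apache.hadoop.hbase.util.Bytes API: the
payload always starts at offset 1, immediately after the one-byte tag, and can be
read back with the matching Bytes.toXxx call.

import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.sql.types.IntegerType

val encoder = new NaiveEncoder()                // hypothetical instantiation
val encoded: Array[Byte] = encoder.encode(IntegerType, 42)

assert(encoded.length == Bytes.SIZEOF_INT + 1)  // [tag byte][4-byte payload]
assert(Bytes.toInt(encoded, 1) == 42)           // payload starts after the tag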