def serialize()

in spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SchemaConverters.scala [423:441]


  // The bare type names in the match (BOOLEAN, BYTES, ...) are the constants
  // of org.apache.avro.Schema.Type; the other referenced types resolve to
  // org.apache.hadoop.hbase.util.Bytes,
  // org.apache.avro.generic.{GenericDatumWriter, GenericRecord},
  // org.apache.avro.io.{BinaryEncoder, EncoderFactory}, and
  // java.io.ByteArrayOutputStream, imported at the top of the file.
  //
  // Converts a value to its byte[] representation according to the given
  // Avro schema: primitives go through HBase's Bytes.toBytes, BYTES/FIXED
  // values pass through unchanged, and RECORDs are written with Avro's
  // binary encoding.
  def serialize(input: Any, schema: Schema): Array[Byte] = {
    schema.getType match {
      case BOOLEAN => Bytes.toBytes(input.asInstanceOf[Boolean])
      // Already a byte array; no conversion needed.
      case BYTES | FIXED => input.asInstanceOf[Array[Byte]]
      case DOUBLE => Bytes.toBytes(input.asInstanceOf[Double])
      case FLOAT => Bytes.toBytes(input.asInstanceOf[Float])
      case INT => Bytes.toBytes(input.asInstanceOf[Int])
      case LONG => Bytes.toBytes(input.asInstanceOf[Long])
      case STRING => Bytes.toBytes(input.asInstanceOf[String])
      case RECORD =>
        // Nested records go through Avro's unbuffered direct binary encoder,
        // so the ByteArrayOutputStream can be read back without flushing.
        val gr = input.asInstanceOf[GenericRecord]
        val writer2 = new GenericDatumWriter[GenericRecord](schema)
        val bao2 = new ByteArrayOutputStream()
        val encoder2: BinaryEncoder = EncoderFactory.get().directBinaryEncoder(bao2, null)
        writer2.write(gr, encoder2)
        bao2.toByteArray()
      // Other Avro types (ENUM, ARRAY, MAP, UNION, NULL) are not handled.
      case _ => throw new Exception(s"unsupported data type ${schema.getType}") // TODO
    }
  }
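
A minimal sketch of how serialize might be called, assuming it is in scope (it lives in the object defined in SchemaConverters.scala); the record name and field names below are hypothetical and only illustrate the primitive and RECORD paths:

  import org.apache.avro.{Schema, SchemaBuilder}
  import org.apache.avro.generic.GenericData

  // Primitive path: an INT is turned into 4 bytes via Bytes.toBytes.
  val intSchema = Schema.create(Schema.Type.INT)
  val intBytes: Array[Byte] = serialize(42, intSchema)

  // RECORD path: a GenericRecord is written with Avro's binary encoding.
  // "User", "name", and "age" are hypothetical names for this sketch.
  val userSchema: Schema = SchemaBuilder
    .record("User").fields()
    .requiredString("name")
    .requiredInt("age")
    .endRecord()

  val user = new GenericData.Record(userSchema)
  user.put("name", "alice")
  user.put("age", 30)
  val userBytes: Array[Byte] = serialize(user, userSchema)

Because the RECORD branch uses EncoderFactory.get().directBinaryEncoder, bytes are written straight to the underlying stream with no internal buffer, which is why the method can call bao2.toByteArray() without flushing the encoder first; the buffered binaryEncoder variant would require an explicit flush.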