in spark/spark-tensorflow-connector/src/main/scala/org/tensorflow/spark/datasources/tfrecords/serde/DefaultTfRecordRowDecoder.scala [109:130]
/**
 * Decodes a single TF-Records [[Feature]] into the Scala/Spark value expected by the
 * target column of the given schema.
 *
 * The column's declared [[DataType]] (not the feature's own encoding) selects the decoder:
 * scalar types map to single-value decoders, `ArrayType` columns map to list decoders
 * keyed on their element type, and `VectorType` columns are materialized as a dense
 * ML vector of doubles.
 *
 * @param feature    the protobuf feature to decode
 * @param schema     schema of the row being built
 * @param fieldIndex index of the target column within `schema`
 * @return the decoded value, typed per the column's data type
 * @throws RuntimeException if the column's data type has no registered decoder
 */
private def decodeFeature(feature: Feature, schema: StructType, fieldIndex: Int): Any = {
  val colDataType = schema.fields(fieldIndex).dataType

  // Single failure path so scalar, array-element, and catch-all cases report identically.
  def unsupported(): Nothing =
    throw new scala.RuntimeException(s"Cannot convert Feature to unsupported data type ${colDataType}")

  colDataType match {
    // ML vector column: decode as a double list, then wrap in a dense vector.
    case VectorType =>
      Vectors.dense(DoubleListFeatureDecoder.decode(feature).toArray)

    // Array column: dispatch on the element type to the matching list decoder.
    case ArrayType(elementType, _) =>
      elementType match {
        case IntegerType => IntListFeatureDecoder.decode(feature)
        case LongType    => LongListFeatureDecoder.decode(feature)
        case FloatType   => FloatListFeatureDecoder.decode(feature)
        case DoubleType  => DoubleListFeatureDecoder.decode(feature)
        case DecimalType() => DecimalListFeatureDecoder.decode(feature)
        case StringType  => StringListFeatureDecoder.decode(feature)
        case BinaryType  => BinaryListFeatureDecoder.decode(feature)
        case _           => unsupported()
      }

    // Scalar column: dispatch to the matching single-value decoder.
    case IntegerType => IntFeatureDecoder.decode(feature)
    case LongType    => LongFeatureDecoder.decode(feature)
    case FloatType   => FloatFeatureDecoder.decode(feature)
    case DoubleType  => DoubleFeatureDecoder.decode(feature)
    case DecimalType() => DecimalFeatureDecoder.decode(feature)
    case StringType  => StringFeatureDecoder.decode(feature)
    case BinaryType  => BinaryFeatureDecoder.decode(feature)

    case _ => unsupported()
  }
}