in sagemaker-spark-sdk/src/main/scala/com/amazonaws/services/sagemaker/sparksdk/protobuf/ProtobufConverter.scala [191:213]
/**
  * Appends the vector's feature values to the record builder as a
  * Float32Tensor entry keyed by [[ValuesIdentifierString]].
  *
  * A [[DenseVector]] is encoded as a flat list of float values (no shape);
  * a [[SparseVector]] records its size as the tensor shape and each stored
  * value together with its index key.
  *
  * @param protobufBuilder record builder to receive the features map entry
  * @param vector dense or sparse ML vector holding the feature values
  * @return the same record builder with the features entry appended
  */
private def setFeatures(protobufBuilder: Record.Builder,
vector: Vector): Record.Builder = {
  val tensorBuilder = Value.newBuilder().getFloat32TensorBuilder()
  vector match {
    case dense: DenseVector =>
      dense.values.foreach(value => tensorBuilder.addValues(value.toFloat))
    case sparse: SparseVector =>
      tensorBuilder.addShape(sparse.size)
      sparse.indices.zip(sparse.values).foreach { case (index, value) =>
        tensorBuilder.addKeys(index)
        tensorBuilder.addValues(value.toFloat)
      }
  }
  val featuresValue = Value.newBuilder().setFloat32Tensor(tensorBuilder.build()).build
  val mapEntry = MapEntry.newBuilder().setKey(ValuesIdentifierString)
    .setValue(featuresValue).build
  protobufBuilder.addFeatures(mapEntry)
}