in connector/src/main/scala/com/datastax/spark/connector/rdd/reader/FunctionBasedRowReader.scala [263:295]
/** Converts the first 11 columns of `row` with their per-column
  * `TypeConverter`s and applies the user-supplied function `f` to the
  * converted values. Columns are addressed positionally, using the codec
  * at the same index in `rowMetaData.codecs`.
  */
override def read(row: Row, rowMetaData: CassandraRowMetadata) = {
  // Fetch column `i` with its codec and convert it to the target type.
  def col[T](i: Int, converter: TypeConverter[T]): T =
    converter.convert(GettableData.get(row, i, rowMetaData.codecs(i)))

  f(
    col(0, a0c), col(1, a1c), col(2, a2c), col(3, a3c),
    col(4, a4c), col(5, a5c), col(6, a6c), col(7, a7c),
    col(8, a8c), col(9, a9c), col(10, a10c)
  )
}
}
/** Maps a 12-column row to a value of type `R` by applying the
  * 12-argument function `f` to the converted column values.
  *
  * One implicit [[TypeConverter]] is supplied per column position
  * (`a0c` through `a11c`) to convert each raw column value to the
  * corresponding argument type of `f`.
  *
  * `ct` is the `ClassTag` for the result type `R`; it is marked
  * `@transient` so it is not captured when the reader is serialized
  * (e.g. when shipped inside a Spark task closure).
  *
  * NOTE(review): the class body lies outside this chunk; presumably its
  * `read` follows the same positional fetch-and-convert pattern as the
  * lower-arity readers in this file — confirm against the full source.
  */
class FunctionBasedRowReader12[R, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11]
(f: (A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11) => R)(
implicit
a0c: TypeConverter[A0],
a1c: TypeConverter[A1],
a2c: TypeConverter[A2],
a3c: TypeConverter[A3],
a4c: TypeConverter[A4],
a5c: TypeConverter[A5],
a6c: TypeConverter[A6],
a7c: TypeConverter[A7],
a8c: TypeConverter[A8],
a9c: TypeConverter[A9],
a10c: TypeConverter[A10],
a11c: TypeConverter[A11],
@transient override val ct: ClassTag[R])
extends FunctionBasedRowReader[R] {