in driver/src/main/scala/com/datastax/spark/connector/mapper/ReflectionColumnMapper.scala [67:103]
/** Builds a [[ColumnMapForReading]] for `struct` by reflecting over `cls`.
  *
  * Constructor parameters are resolved against the selected columns' aliases;
  * any parameter that cannot be matched is a hard error. Setter methods, by
  * contrast, are optional: setters with no matching column are dropped.
  */
override def columnMapForReading(
  struct: StructDef,
  selectedColumns: IndexedSeq[ColumnRef]): ColumnMapForReading = {

  val refsByAlias = columnRefByAliasName(selectedColumns)

  // Resolve a constructor parameter name to a column, failing loudly when
  // no mapping exists — a partially-constructible object is not useful.
  def resolveOrFail(paramName: String): ColumnRef =
    constructorParamToColumnName(paramName, refsByAlias).getOrElse {
      throw new IllegalArgumentException(
        s"Failed to map constructor parameter $paramName in $cls to a column of ${struct.name}")
    }

  // Columns bound to the constructor's parameters, skipping names that look
  // like compiler-synthesized outer-instance references of inner classes.
  def constructorColumns(ctor: Constructor[_]): Seq[ColumnRef] =
    if (isNoArgsConstructor(ctor)) Nil
    else
      paranamer
        .lookupParameterNames(ctor)
        .filterNot(name => name == "$_outer" || name.startsWith("this$"))
        .map(resolveOrFail)

  val ctorColumns = constructorColumns(resolveConstructor(cls))

  // Unlike constructor params, an unmapped setter is simply left out.
  val setterColumns: Map[String, ColumnRef] =
    cls.getMethods
      .filter(isSetter)
      .flatMap { method =>
        setterToColumnName(method.getName, refsByAlias).map(ref => method.getName -> ref)
      }
      .toMap

  new SimpleColumnMapForReading(ctorColumns, setterColumns, allowsNull)
}