in driver/src/main/scala/com/datastax/spark/connector/mapper/DefaultColumnMapper.scala [137:172]
override def newTable(
    keyspaceName: String,
    tableName: String,
    protocolVersion: ProtocolVersion = ProtocolVersion.DEFAULT): TableDef = {

  // filter out inherited Scala getters, because they are very likely
  // not the properties users want to map
  val inheritedScalaGetterNames = inheritedScalaGetters.map(_._1)
  val paramNames = constructorParams.map(_._1)
  val getterNames = getters.map(_._1).filterNot(inheritedScalaGetterNames.toSet.contains)
  val setterNames = setters.map(_._1).map(setterNameToPropertyName)
  val propertyNames = (paramNames ++ getterNames ++ setterNames)
    .distinct
    .filterNot(_.contains("$")) // ignore any properties generated by the Scala compiler
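  // illustrative assumption, not from this file: for a hypothetical
  // `case class WordCount(word: String, count: Long)` this step collects the
  // constructor parameters and accessors into Seq("word", "count")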

  // pick only those properties for which we know the Cassandra data type:
  val getterTypes = getters.toMap
  val mappableProperties = propertyNames
    .map { name => (name, getterTypes(name)) }
    .map { case (name, tpe) => (name, Try(ColumnType.fromScalaType(tpe, protocolVersion))) }
    .collect { case (name, Success(columnType)) => (name, columnType) }
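  // properties whose Scala types `ColumnType.fromScalaType` cannot convert
  // fail the `Try` above and are silently dropped here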

  require(
    mappableProperties.nonEmpty,
    "No mappable properties found in class: " + tpe.toString)

  val columns =
    for (((propertyName, columnType), i) <- mappableProperties.zipWithIndex) yield {
      val columnName = ColumnMapperConvention.camelCaseToUnderscore(propertyName)
      val columnRole = if (i == 0) PartitionKeyColumn else RegularColumn
      ColumnDef(columnName, columnRole, columnType)
    }
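  // the first mappable property becomes the sole partition key; there are no
  // clustering columns, and the remaining properties map to regular columns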

  TableDef(keyspaceName, tableName, Seq(columns.head), Seq.empty, columns.tail)
}
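
// A minimal usage sketch. Hedged: `WordCount`, the keyspace/table names, and
// the expected results below are assumptions for illustration, not part of
// this file, and assume `DefaultColumnMapper` can be constructed with no
// arguments for a case class with an implicit TypeTag in scope:
//
//   case class WordCount(word: String, count: Long)
//
//   val mapper = new DefaultColumnMapper[WordCount]()
//   val table  = mapper.newTable("test", "words")
//   // table.partitionKey   == Seq(ColumnDef("word", PartitionKeyColumn, VarCharType))
//   // table.regularColumns == Seq(ColumnDef("count", RegularColumn, BigIntType))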