in connector/src/main/scala/com/datastax/spark/connector/datasource/ScanHelper.scala [42:68]
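/** Checks that every selected [[ColumnRef]] refers to a column of the given
  * table. TTL and WRITETIME selectors are further required to refer to the
  * table's regular columns. Throws [[java.io.IOException]] on the first
  * violation; otherwise returns the refs unchanged so calls can be chained. */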
def checkColumnsExistence(columns: Seq[ColumnRef], tableDef: TableDef): Seq[ColumnRef] = {
  val allColumnNames = tableDef.columns.map(_.columnName).toSet
  val regularColumnNames = tableDef.regularColumns.map(_.columnName).toSet
  val keyspaceName = tableDef.keyspaceName
  val tableName = tableDef.tableName

  def checkSingleColumn(column: ColumnRef): ColumnRef = {
    column match {
      case ColumnName(columnName, _) =>
        if (!allColumnNames.contains(columnName))
          throw new IOException(s"Column $column not found in table $keyspaceName.$tableName")

      case TTL(columnName, _) =>
        if (!regularColumnNames.contains(columnName))
          throw new IOException(s"TTL can be obtained only for regular columns, " +
            s"but column $columnName is not a regular column in table $keyspaceName.$tableName.")

      case WriteTime(columnName, _) =>
        if (!regularColumnNames.contains(columnName))
          throw new IOException(s"Write time can be obtained only for regular columns, " +
            s"but column $columnName is not a regular column in table $keyspaceName.$tableName.")

      // Other refs (e.g. RowCountRef) need no per-column validation.
      case _ =>
    }
    column
  }

  columns.map(checkSingleColumn)
}
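
For illustration, a minimal usage sketch follows. The keyspace, table, and column names are hypothetical, and the schema construction assumes the connector's TableDef/ColumnDef case classes from com.datastax.spark.connector.cql and the ColumnRef constructors from the com.datastax.spark.connector package:

// Usage sketch — hypothetical schema; assumes the connector's
// cql.TableDef/ColumnDef API and the types package for column types.
import com.datastax.spark.connector.{ColumnName, TTL, WriteTime}
import com.datastax.spark.connector.cql.{ColumnDef, PartitionKeyColumn, RegularColumn, TableDef}
import com.datastax.spark.connector.datasource.ScanHelper
import com.datastax.spark.connector.types.{IntType, VarCharType}

val table = TableDef(
  keyspaceName = "ks",
  tableName = "kv",
  partitionKey = Seq(ColumnDef("key", PartitionKeyColumn, IntType)),
  clusteringColumns = Seq.empty,
  regularColumns = Seq(ColumnDef("value", RegularColumn, VarCharType)))

// Passes: "value" exists and is a regular column, so TTL/WRITETIME are valid for it.
ScanHelper.checkColumnsExistence(Seq(ColumnName("value"), TTL("value"), WriteTime("value")), table)

// Throws IOException: "key" is part of the partition key, so its TTL is undefined.
ScanHelper.checkColumnsExistence(Seq(TTL("key")), table)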