in driver/src/main/scala/com/datastax/spark/connector/types/TupleType.scala [82:106]
  override def newInstance(componentValues: Any*): TupleValue =
    newInstance(defaultComponentConverters)(componentValues: _*)

  // Optional converter to an adapter exposing a value's components as a Seq;
  // None if no such TypeConverter is registered (Try(...).toOption tolerates the failure).
  private lazy val valuesSeqConverter = scala.util.Try(TypeConverter.forType[ValuesSeqAdapter]).toOption

  /** Returns a converter that builds a [[TupleValue]] from several source representations,
    * converting each component with the corresponding converter from `componentConverters`. */
  def converterToCassandra(componentConverters: IndexedSeq[TypeConverter[_ <: AnyRef]]) = {
    new TypeConverter[TupleValue] {
      override def targetTypeTag = TupleValue.TypeTag

      override def convertPF = {
        case value if valuesSeqConverter.exists(_.convertPF.isDefinedAt(value)) =>
          val values = valuesSeqConverter.get.convert(value).toSeq()
          newInstance(componentConverters)(values: _*)
        case x: TupleValue =>
          newInstance(componentConverters)(x.columnValues: _*)
        case x: Product => // converts from Scala tuples
          newInstance(componentConverters)(x.productIterator.toIndexedSeq: _*)
        case x: Pair[_, _] => // Java programmers may like this
          newInstance(componentConverters)(x.getLeft, x.getRight)
        case x: Triple[_, _, _] => // Java programmers may like this
          newInstance(componentConverters)(x.getLeft, x.getMiddle, x.getRight)
      }
    }
  }
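
A minimal usage sketch, not part of the original file: it shows one way the converter returned by converterToCassandra could be applied to the input shapes matched in convertPF. The tupleType value, the choice of component converters, the import paths, and the assumption that Pair refers to org.apache.commons.lang3.tuple.Pair are illustrative only.

  import com.datastax.spark.connector.TupleValue
  import com.datastax.spark.connector.types.{TupleType, TypeConverter}
  import org.apache.commons.lang3.tuple.{Pair => JPair}

  val tupleType: TupleType = ???   // hypothetical: normally obtained from table metadata

  // One converter per tuple component, here assuming an (int, text) tuple.
  val componentConverters: IndexedSeq[TypeConverter[_ <: AnyRef]] =
    IndexedSeq(TypeConverter.forType[java.lang.Integer], TypeConverter.forType[String])

  val converter = tupleType.converterToCassandra(componentConverters)

  // Each call hits one of the convertPF cases shown above.
  val fromScalaTuple: TupleValue = converter.convert((1, "foo"))                  // Product case
  val fromJavaPair: TupleValue   = converter.convert(JPair.of(Int.box(1), "foo")) // Pair case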