in modules/spark-ext/spark/src/main/scala/org/apache/ignite/spark/impl/optimization/SimpleExpressions.scala [52:122]
/**
 * Renders the given Catalyst expression as an Ignite SQL string, when this
 * transformer supports it.
 *
 * @param expr Expression to render.
 * @param childToString Recursive renderer used for child expressions.
 * @param useQualifier If true, attribute references are prefixed with their qualifier.
 * @param useAlias If true, aliases (explicit or stored in attribute metadata) are emitted.
 * @param caseSensitive Passed to identifier quoting to decide whether names need quotes.
 * @return Some(sql) for supported expressions, None otherwise.
 */
override def toString(expr: Expression, childToString: Expression ⇒ String, useQualifier: Boolean,
    useAlias: Boolean, caseSensitive: Boolean): Option[String] = expr match {
    case l: Literal ⇒
        if (l.value == null)
            Some("null")
        else {
            l.dataType match {
                case StringType ⇒
                    //Double embedded single quotes (ANSI SQL escaping), otherwise a
                    //value such as O'Brien would produce a malformed SQL literal.
                    Some("'" + l.value.toString.replace("'", "''") + "'")
                case TimestampType ⇒
                    l.value match {
                        //Internal representation of TimestampType is Long.
                        //So we converting from internal spark representation to CAST call.
                        case date: Long ⇒
                            Some(s"CAST('${timestampFormat.get.format(DateTimeUtils.toJavaTimestamp(date))}' " +
                                s"AS TIMESTAMP)")
                        case _ ⇒
                            Some(l.value.toString)
                    }
                case DateType ⇒
                    l.value match {
                        //Internal representation of DateType is Int.
                        //So we converting from internal spark representation to CAST call.
                        case days: Integer ⇒
                            val date = new java.util.Date(DateTimeUtils.microsToMillis(
                                DateTimeUtils.daysToMicros(days, ZoneOffset.UTC)))
                            Some(s"CAST('${dateFormat.get.format(date)}' AS DATE)")
                        case _ ⇒
                            Some(l.value.toString)
                    }
                case _ ⇒
                    Some(l.value.toString)
            }
        }
    case ar: AttributeReference ⇒
        //Use the first qualifier element when present and requested;
        //otherwise fall back to the (possibly quoted) bare column name.
        val name =
            if (useQualifier)
                ar.qualifier
                    .headOption
                    .map(quoteStringIfNeeded(_, caseSensitive))
                    .map(_ + "." + quoteStringIfNeeded(ar.name, caseSensitive))
                    .getOrElse(ar.name)
            else
                quoteStringIfNeeded(ar.name, caseSensitive)
        //An alias stored in metadata is emitted only when it actually differs
        //from the column name itself.
        if (ar.metadata.contains(ALIAS) &&
            !isAliasEqualColumnName(ar.metadata.getString(ALIAS), ar.name) &&
            useAlias) {
            Some(aliasToString(name, ar.metadata.getString(ALIAS)))
        } else
            Some(name)
    case Alias(child, name) ⇒
        if (useAlias)
            Some(childToString(child)).map(aliasToString(_, name))
        else
            Some(childToString(child))
    case Cast(child, dataType, _, _) ⇒
        Some(s"CAST(${childToString(child)} AS ${toSqlType(dataType)})")
    case SortOrder(child, direction, _, _) ⇒
        //Ascending is the SQL default, so only DESC needs to be spelled out.
        Some(s"${childToString(child)}${if(direction==Descending) " DESC" else ""}")
    case _ ⇒
        None
}