in spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/Utils.scala [40:80]
/** Wraps a column name in backticks so it is treated as an identifier in a Doris SQL fragment. */
def quote(colName: String): String = "`" + colName + "`"
/**
 * Translate a Spark SQL [[Filter]] into a Doris FE filter expression.
 *
 * @param filter the Spark filter to translate
 * @param dialect JDBC dialect, threaded through recursive calls on compound filters
 * @param inValueLengthLimit maximum number of values allowed in an IN list before
 *                           the filter is rejected for push-down
 * @return Some(expression) when Doris FE can evaluate the filter, None when it cannot
 */
def compileFilter(filter: Filter, dialect: JdbcDialect, inValueLengthLimit: Int): Option[String] = {
  // Compile both children of a binary logical operator; the compound filter is
  // pushable only when BOTH children are pushable.
  def compileBoth(left: Filter, right: Filter, op: String): Option[String] = {
    val compiled = Seq(left, right).flatMap(compileFilter(_, dialect, inValueLengthLimit))
    if (compiled.size == 2) Some(compiled.map(p => s"($p)").mkString(s" $op ")) else None
  }

  filter match {
    case EqualTo(attribute, value) => Some(s"${quote(attribute)} = ${compileValue(value)}")
    case GreaterThan(attribute, value) => Some(s"${quote(attribute)} > ${compileValue(value)}")
    case GreaterThanOrEqual(attribute, value) => Some(s"${quote(attribute)} >= ${compileValue(value)}")
    case LessThan(attribute, value) => Some(s"${quote(attribute)} < ${compileValue(value)}")
    case LessThanOrEqual(attribute, value) => Some(s"${quote(attribute)} <= ${compileValue(value)}")
    case In(attribute, values) =>
      // An empty IN list is not valid SQL, and one at or above the configured limit
      // is rejected to keep the pushed-down predicate bounded.
      if (values.nonEmpty && values.length < inValueLengthLimit) {
        Some(s"${quote(attribute)} in (${compileValue(values)})")
      } else {
        None
      }
    case IsNull(attribute) => Some(s"${quote(attribute)} is null")
    case IsNotNull(attribute) => Some(s"${quote(attribute)} is not null")
    case And(left, right) => compileBoth(left, right, "and")
    case Or(left, right) => compileBoth(left, right, "or")
    // Any other filter type is not supported by Doris FE push-down.
    case _ => None
  }
}