in src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/delta/SparkFilters.scala [43:79]
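/**
 * Converts a Spark data source [[Filter]] pushed down for partition pruning
 * into the engine's [[SExpression]] tree. Leaf filters that cannot be
 * expressed are over-approximated as [[SLiteral.True]] (scan everything,
 * never skip a matching partition); a conjunction or disjunction whose side
 * fails to convert yields null.
 */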
def convert(schema: StructType, filter: Filter): SExpression = filter match {
  case _: AlwaysTrue => SLiteral.True
  case _: AlwaysFalse => SLiteral.False
  case eq: EqualTo => new SEqualTo(schema.column(unquote(eq.attribute)), literalConvert(eq.value))
  case enq: EqualNullSafe =>
    // '<=>' with a null literal is a null check; with a non-null literal it can
    // only match non-null column values, so plain equality is a safe translation.
    if (enq.value == null) new SIsNull(schema.column(unquote(enq.attribute)))
    else new SEqualTo(schema.column(unquote(enq.attribute)), literalConvert(enq.value))
  case isNull: IsNull => new SIsNull(schema.column(unquote(isNull.attribute)))
  case isNotNull: IsNotNull => new SIsNotNull(schema.column(unquote(isNotNull.attribute)))
  case lt: LessThan => new SLessThan(schema.column(unquote(lt.attribute)), literalConvert(lt.value))
  case lte: LessThanOrEqual =>
    new SLessThanOrEqual(schema.column(unquote(lte.attribute)), literalConvert(lte.value))
  case gt: GreaterThan =>
    new SGreaterThan(schema.column(unquote(gt.attribute)), literalConvert(gt.value))
  case gte: GreaterThanOrEqual =>
    new SGreaterThanOrEqual(schema.column(unquote(gte.attribute)), literalConvert(gte.value))
  case in: In =>
    val inValues = in.values.map(literalConvert).toList.asJava
    new SIn(schema.column(unquote(in.attribute)), inValues)
  case and: And =>
    val left = convert(schema, and.left)
    val right = convert(schema, and.right)
    // if either side could not be converted, give up on the whole conjunction
    // rather than prune with half a predicate
    if (left != null && right != null) new SAnd(left, right) else null
  case or: Or =>
    val left = convert(schema, or.left)
    val right = convert(schema, or.right)
    if (left != null && right != null) new SOr(left, right) else null
  case not: Not =>
    not.child match {
      // These string filters are unsupported and would fall through to the
      // 'true' placeholder below; negating that placeholder would wrongly
      // prune partitions, so the negation is over-approximated as 'true' too.
      case _: StringContains | _: StringStartsWith | _: StringEndsWith => SLiteral.True
      case child => new SNot(convert(schema, child))
    }
  case unsupportedFilter =>
    // Over-approximate with 'true' so that all partitions are scanned.
    // Currently unsupported filters are:
    // - StringStartsWith
    // - StringEndsWith
    // - StringContains
    SLiteral.True
}
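
// Usage sketch (illustrative, not part of the original file). It assumes this
// code sits in the same scope as `convert`, that `StructType` here carries the
// `column` enrichment used above, and that the Spark filter classes
// (`org.apache.spark.sql.sources._`) are imported as in the rest of the file.
private def convertExample(schema: StructType): Unit = {
  // Fully supported tree: becomes SAnd(SEqualTo(name, 'abc'), SGreaterThan(id, 10)).
  val pruned = convert(schema, And(EqualTo("name", "abc"), GreaterThan("id", 10)))

  // Unsupported leaf: degrades to SLiteral.True, so every partition is scanned
  // rather than being wrongly skipped.
  val fallback = convert(schema, StringStartsWith("name", "ab"))
}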