def and()

in spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/Bound.scala [54:115]
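
Given a region's row-key range `r` and a sequence of scan ranges `rs`, this method intersects `r` with each scan range: it takes the larger of the two lower bounds and the smaller of the two upper bounds, and keeps the result only when the intersection is non-empty. `ord` is the byte-array `Ordering` resolved from the enclosing scope.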


  def and(r: Range, rs: Seq[Range]): Seq[Range] = {
    rs.flatMap {
      s =>
        // combined lower bound: the larger of the scan's and the region's
        val lower = s.lower
          .map {
            x =>
              // the scan has a lower bound
              r.lower
                .map {
                  y =>
                    // the region has a lower bound
                    if (ord.compare(x.b, y.b) < 0) {
                      // the scan lower bound is smaller than the region server
                      // lower bound, so keep the region's (the tighter of the two)
                      Some(y)
                    } else {
                      // the scan lower bound is greater than or equal to the
                      // region server lower bound, so keep the scan's
                      Some(x)
                    }
                }
                .getOrElse(Some(x))
          }
          .getOrElse(r.lower)

        // combined upper bound: the smaller of the scan's and the region's
        val upper = s.upper
          .map {
            x =>
              // the scan has an upper bound
              r.upper
                .map {
                  y =>
                    // the region has an upper bound
                    if (ord.compare(x.b, y.b) >= 0) {
                      // the scan upper bound is greater than or equal to the
                      // region server upper bound; the region's scan stop key
                      // is exclusive, so stopping there is safe
                      Some(y)
                    } else {
                      // the scan upper bound is less than the region server
                      // upper bound, so keep the scan's
                      Some(x)
                    }
                }
                .getOrElse(Some(x))
          }
          .getOrElse(r.upper)

        // compare the combined bounds; a missing bound on either side means
        // that side is unbounded, so the intersection cannot be empty
        val c = lower
          .map {
            x =>
              upper
                .map {
                  y =>
                    ord.compare(x.b, y.b)
                }
                .getOrElse(-1)
          }
          .getOrElse(-1)
        if (c < 0) {
          // non-empty intersection: the lower bound is strictly below the upper
          Some(Range(lower, upper))
        } else {
          None
        }
    }
  }
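
Usage sketch: a minimal, self-contained illustration of the intersection behavior. The `Bound` and `Range` shapes and the byte ordering below are simplified stand-ins (assumptions, not the exact definitions from Bound.scala), and `and` is taken to be the method above with this `ord` in scope:

  // assumed simplified shapes of the types used by and()
  case class Bound(b: Array[Byte], inc: Boolean)
  case class Range(lower: Option[Bound], upper: Option[Bound])

  // unsigned lexicographic byte comparison, the order HBase uses for row keys
  val ord: Ordering[Array[Byte]] = new Ordering[Array[Byte]] {
    def compare(x: Array[Byte], y: Array[Byte]): Int =
      x.zip(y)
        .map { case (a, b) => (a & 0xff) - (b & 0xff) }
        .find(_ != 0)
        .getOrElse(x.length - y.length)
  }

  // region ["b", "m") intersected with two scan ranges
  val region = Range(Some(Bound("b".getBytes, true)), Some(Bound("m".getBytes, false)))
  val scans = Seq(
    Range(Some(Bound("a".getBytes, true)), Some(Bound("f".getBytes, false))), // overlaps
    Range(Some(Bound("x".getBytes, true)), None) // starts past the region end
  )

  // and(region, scans) keeps only the non-empty intersection ["b", "f"):
  //   the first scan is clipped to the region's lower bound "b";
  //   the second starts at "x" >= "m", so its intersection is empty and dropped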