def addRange()

in spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala [45:69]


  /** Converts a ScanRange into optional lower/upper Bounds and appends the resulting Range to `ranges`. */
  def addRange(r: ScanRange) = {
    val lower = if (r.lowerBound != null && r.lowerBound.length > 0) {
      Some(Bound(r.lowerBound, r.isLowerBoundEqualTo))
    } else {
      None
    }
    val upper = if (r.upperBound != null && r.upperBound.length > 0) {
      if (!r.isUpperBoundEqualTo) {
        Some(Bound(r.upperBound, false))
      } else {

        // The HBase stopRow is exclusive, so it does not honour isUpperBoundEqualTo
        // on its own.  To keep the upper bound itself inside the scan, append a byte
        // to the stopRow key so it becomes the smallest key strictly greater than
        // the bound.
        val newArray = new Array[Byte](r.upperBound.length + 1)
        System.arraycopy(r.upperBound, 0, newArray, 0, r.upperBound.length)

        // Append ByteMin (0x00): the padded key is the immediate successor of upperBound.
        newArray(r.upperBound.length) = ByteMin
        Some(Bound(newArray, false))
      }
    } else {
      None
    }
    ranges :+= Range(lower, upper)
  }