private def buildScan()

in spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala [181:204]


  /**
   * Builds the HBase [[Scan]] used to read one partition's data.
   *
   * The row-key `range` is translated into the scan's start/stop rows; an
   * absent lower bound starts from the empty byte array (table start) and an
   * absent upper bound scans to the end of the table. Only the inner byte
   * arrays of each [[Bound]] are used here — the second `Bound` field is
   * ignored by this method (NOTE(review): presumably an inclusiveness flag;
   * confirm whether exclusivity should be honored).
   *
   * @param range   lower/upper row-key bounds, either side optional
   * @param filter  pushed-down SparkSQL filter to attach to the scan, if any
   * @param columns projected fields; row-key fields are skipped since the key
   *                is always returned by HBase
   * @return a configured [[Scan]] honoring the relation's caching settings
   */
  private def buildScan(
      range: Range,
      filter: Option[SparkSQLPushDownFilter],
      columns: Seq[Field]): Scan = {
    val scan = (range.lower, range.upper) match {
      case (Some(Bound(low, _)), Some(Bound(up, _))) => new Scan(low, up)
      case (None, Some(Bound(up, _)))                => new Scan(Array[Byte](), up)
      case (Some(Bound(low, _)), None)               => new Scan(low)
      case (None, None)                              => new Scan()
    }
    // Apply timestamp/time-range settings configured on the relation.
    handleTimeSemantics(scan)

    // Restrict the scan to the projected non-row-key columns.
    for (field <- columns if !field.isRowKey) {
      scan.addColumn(field.cfBytes, field.colBytes)
    }

    // Propagate the relation's read-tuning knobs onto the scan.
    scan.setCacheBlocks(relation.blockCacheEnable)
    scan.setBatch(relation.batchNum)
    scan.setCaching(relation.cacheSize)
    filter.foreach(scan.setFilter)
    scan
  }