in spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/BulkLoadPartitioner.scala [35:60]
  // Number of partitions mirrors the number of region start keys; fall back to
  // a single partition when no start keys are known.
  override def numPartitions: Int = if (startKeys.length == 0) 1 else startKeys.length

  override def getPartition(key: Any): Int = {

    // Region start keys are raw byte arrays, so compare them lexicographically.
    val comparator: Comparator[Array[Byte]] = new Comparator[Array[Byte]] {
      override def compare(o1: Array[Byte], o2: Array[Byte]): Int = {
        Bytes.compareTo(o1, o2)
      }
    }

    // Extract the raw row key from whichever wrapper type the caller used.
    val rowKey: Array[Byte] =
      key match {
        case qualifier: KeyFamilyQualifier =>
          qualifier.rowKey
        case wrapper: ByteArrayWrapper =>
          wrapper.value
        case _ =>
          key.asInstanceOf[Array[Byte]]
      }

    // binarySearch returns the matching index, or (-(insertionPoint) - 1) when
    // the row key is not itself a region start key.
    var partition = util.Arrays.binarySearch(startKeys, rowKey, comparator)
    // Convert a negative (not found) result to insertionPoint - 1, the index of
    // the last start key that sorts before the row key, i.e. its owning region.
    if (partition < 0)
      partition = -partition - 2
    // Row keys sorting before the first start key belong in partition 0.
    if (partition < 0)
      partition = 0
    partition
  }
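
For context, the sketch below shows how a partitioner like this is typically plugged into a bulk-load RDD. It is a minimal sketch, not the module's own bulk-load code, and it assumes details that are not visible in this excerpt: that BulkLoadPartitioner is constructed from the table's region start keys as an Array[Array[Byte]], and that KeyFamilyQualifier implements Comparable[KeyFamilyQualifier]. The object and method names are illustrative only.

// Usage sketch; relies on the assumptions stated above.
import org.apache.hadoop.hbase.spark.{BulkLoadPartitioner, KeyFamilyQualifier}
import org.apache.spark.rdd.RDD

object BulkLoadPartitionerSketch {

  // Route each cell to the partition of the region that owns its row key, then
  // sort within partitions so a downstream HFile writer sees keys in order.
  def partitionForBulkLoad(
      cells: RDD[(KeyFamilyQualifier, Array[Byte])],
      regionStartKeys: Array[Array[Byte]]): RDD[(KeyFamilyQualifier, Array[Byte])] = {
    // Derive the Ordering required by repartitionAndSortWithinPartitions from
    // KeyFamilyQualifier's compareTo (assumed above).
    implicit val ordering: Ordering[KeyFamilyQualifier] =
      new Ordering[KeyFamilyQualifier] {
        override def compare(a: KeyFamilyQualifier, b: KeyFamilyQualifier): Int =
          a.compareTo(b)
      }
    cells.repartitionAndSortWithinPartitions(new BulkLoadPartitioner(regionStartKeys))
  }
}

Because numPartitions equals the number of region start keys and getPartition maps each row key to the index of its owning region, every Spark partition lines up with exactly one HBase region, which keeps the generated HFiles within region boundaries.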