def parseRowKey()

in spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala [271:303]


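  /**
   * Parses all of the key fields out of a raw HBase row key. The fold walks the
   * fields in order, threading the current byte offset through its accumulator:
   * fixed-length fields advance the offset by field.length, and variable-length
   * fields are resolved by their data type.
   *
   * @param row the raw row key bytes retrieved from HBase
   * @param keyFields all fields composing the row key, ordered by their position in the key
   * @return a map from each key field to its decoded value
   */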
  def parseRowKey(row: Array[Byte], keyFields: Seq[Field]): Map[Field, Any] = {
    keyFields
      .foldLeft((0, Seq[(Field, Any)]()))(
        (state, field) => {
          val idx = state._1
          val parsed = state._2
          if (field.length != -1) {
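            // Fixed-length dimension: consume exactly field.length bytes from the key.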
            val value = Utils.hbaseFieldToScalaType(field, row, idx, field.length)
            // Return the new index and appended value
            (idx + field.length, parsed ++ Seq((field, value)))
          } else {
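            // Variable-length field (length == -1): its extent depends on the data type.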
            field.dt match {
              case StringType =>
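                // Scan forward for the delimiter byte that separates string dimensions.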
                val pos = row.indexOf(HBaseTableCatalog.delimiter, idx)
                if (pos == -1 || pos > row.length) {
                  // No delimiter found: this string is the last dimension of the row key.
                  val value = Utils.hbaseFieldToScalaType(field, row, idx, row.length)
                  (row.length + 1, parsed ++ Seq((field, value)))
                } else {
                  val value = Utils.hbaseFieldToScalaType(field, row, idx, pos - idx)
                  (pos, parsed ++ Seq((field, value)))
                }
              // Non-string type of unknown length: assume it extends to the end of the row key.
              case _ =>
                (
                  row.length + 1,
                  parsed ++ Seq((field, Utils.hbaseFieldToScalaType(field, row, idx, row.length))))
            }
          }
        })
      ._2
      .toMap
  }
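
The decoding itself happens in Utils.hbaseFieldToScalaType, and Field carries the catalog metadata (length, dt) consulted above. To show just the fold-and-offset mechanics in isolation, here is a minimal, self-contained sketch. KeyField, FixedInt, DelimitedString, and parseKey are hypothetical stand-ins for illustration, not the connector's API, and the delimiter is assumed to be a single zero byte.

import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets

object RowKeySketch {
  // Hypothetical stand-ins for the connector's Field metadata.
  sealed trait KeyType
  case object FixedInt extends KeyType        // behaves like field.length == 4
  case object DelimitedString extends KeyType // behaves like StringType with length == -1

  case class KeyField(name: String, kind: KeyType)

  val Delimiter: Byte = 0 // assumed value of HBaseTableCatalog.delimiter

  // Same fold shape as parseRowKey: thread (offset, parsed-so-far) across the key fields.
  def parseKey(row: Array[Byte], fields: Seq[KeyField]): Map[String, Any] =
    fields.foldLeft((0, Map.empty[String, Any])) {
      case ((idx, parsed), field) =>
        field.kind match {
          case FixedInt =>
            // Fixed-length dimension: consume exactly four bytes.
            val value = ByteBuffer.wrap(row, idx, 4).getInt
            (idx + 4, parsed + (field.name -> value))
          case DelimitedString =>
            val pos = row.indexOf(Delimiter, idx)
            if (pos == -1) {
              // No delimiter: the string is the last dimension and runs to the end.
              val value = new String(row, idx, row.length - idx, StandardCharsets.UTF_8)
              (row.length + 1, parsed + (field.name -> value))
            } else {
              val value = new String(row, idx, pos - idx, StandardCharsets.UTF_8)
              // This sketch steps past the delimiter; parseRowKey itself continues at pos.
              (pos + 1, parsed + (field.name -> value))
            }
        }
    }._2

  def main(args: Array[String]): Unit = {
    // Composite key (id: Int, name: String): four fixed bytes, then a delimited string.
    val key = ByteBuffer.allocate(4).putInt(42).array() ++
      "alice".getBytes(StandardCharsets.UTF_8)
    println(parseKey(key, Seq(KeyField("id", FixedInt), KeyField("name", DelimitedString))))
    // prints: Map(id -> 42, name -> alice)
  }
}

Threading the offset through the fold keeps the parser a single pure expression: each field's decode depends only on where the previous field ended, which is why the accumulator is the (index, parsed) pair rather than a mutable cursor.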