def apply()

in spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableCatalog.scala [247:280]


  /**
   * Builds an [[HBaseTableCatalog]] from the user-supplied datasource options.
   *
   * Parses the JSON catalog stored under the `tableCatalog` key: the table's
   * namespace (defaulting to "default") and name, the per-column mappings
   * (column family, qualifier, optional type, optional Avro schema reference,
   * optional SerDes class name, optional fixed length), and the row-key spec.
   *
   * @param params datasource options; must contain the catalog JSON under `tableCatalog`
   * @return the parsed catalog
   */
  def apply(params: Map[String, String]): HBaseTableCatalog = {
    val parameters = convert(params)
    val jString = parameters(tableCatalog)
    implicit val formats: Formats = DefaultFormats
    val map = JsonMethods.parse(jString)
    val tableMeta = map \ table
    val nSpace = (tableMeta \ nameSpace).extractOrElse("default")
    val tName = (tableMeta \ tableName).extract[String]
    val cIter = (map \ columns).extract[Map[String, Map[String, String]]]
    val schemaMap = mutable.HashMap.empty[String, Field]
    cIter.foreach {
      case (name, column) =>
        // Custom SerDes instantiated via its no-arg constructor.
        // (Class#newInstance is deprecated since Java 9; use getDeclaredConstructor.)
        val sd = column
          .get(serdes)
          .map(n => Class.forName(n).getDeclaredConstructor().newInstance().asInstanceOf[SerDes])
        val len = column.get(length).map(_.toInt).getOrElse(-1)
        // The avro entry is an indirection: its value keys into `parameters`
        // to fetch the actual Avro schema string.
        val sAvro = column.get(avro).map(parameters(_))
        val f = Field(
          name,
          // Columns without an explicit family are treated as row-key components.
          column.getOrElse(cf, rowKey),
          // Qualifier is mandatory; Map.apply fails with "key not found: <col>"
          // instead of an opaque Option.get NoSuchElementException.
          column(col),
          column.get(`type`),
          sAvro,
          sd,
          len)
        schemaMap += ((name, f))
    }
    val rKey = RowKey((map \ rowKey).extract[String])
    HBaseTableCatalog(nSpace, tName, rKey, SchemaMap(schemaMap), parameters)
  }