id: 1 unit: def transverseFilterTree() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 435 end line: 577 size: 126 LOC McCabe index: 24 number of parameters: 3 id: 2 unit: public static void main() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaProxy.java start line: 131 end line: 263 size: 103 LOC McCabe index: 12 number of parameters: 1 id: 3 unit: def bulkLoadThinRows[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 746 end line: 878 size: 99 LOC McCabe index: 11 number of parameters: 3 id: 4 unit: def ranges() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/NaiveEncoder.scala start line: 67 end line: 154 size: 88 LOC McCabe index: 16 number of parameters: 1 id: 5 unit: private def writeValueToHFile() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 962 end line: 1049 size: 82 LOC McCabe index: 9 number of parameters: 15 id: 6 unit: def createConverterToSQL() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SchemaConverters.scala start line: 150 end line: 232 size: 81 LOC McCabe index: 32 number of parameters: 1 id: 7 unit: def bulkLoad[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 612 end line: 712 size: 80 LOC McCabe index: 9 number of parameters: 3 id: 8 unit: def createConverterToAvro() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SchemaConverters.scala start line: 330 end line: 397 size: 67 LOC McCabe index: 18 number of parameters: 3 id: 9 unit: override def buildScan() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 311 end line: 392 size: 67 LOC McCabe index: 8 number of parameters: 2 id: 10 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/DataType.scala start line: 83 end line: 171 size: 65 LOC McCabe index: 4 number of parameters: 1 id: 11 unit: override def encode() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/NaiveEncoder.scala start line: 173 end line: 228 size: 56 LOC McCabe index: 12 number of parameters: 2 id: 12 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/AvroSource.scala start line: 102 end line: 162 size: 52 LOC McCabe index: 6 number of parameters: 1 id: 13 unit: private def build() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 230 end line: 279 size: 50 LOC McCabe index: 14 number of parameters: 3 id: 14 unit: public static void checkForOrCreateReplicationPeer() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaProxy.java start line: 307 end line: 361 size: 49 LOC McCabe index: 9 number of parameters: 6 id: 15 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkGetExample.scala start line: 37 end line: 96 size: 47 LOC McCabe index: 4 number of parameters: 1 id: 16 unit: def toSqlType() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SchemaConverters.scala start line: 69 end line: 121 size: 46 LOC McCabe index: 22 number of parameters: 1 id: 17 unit: def mergeIntersect() file: 
spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 1067 end line: 1114 size: 46 LOC McCabe index: 8 number of parameters: 1 id: 18 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkGetExample.scala start line: 37 end line: 93 size: 44 LOC McCabe index: 4 number of parameters: 1 id: 19 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseMapPartitionExample.scala start line: 36 end line: 92 size: 43 LOC McCabe index: 4 number of parameters: 1 id: 20 unit: public static SparkSQLPushDownFilter parseFrom() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java start line: 187 end line: 242 size: 42 LOC McCabe index: 5 number of parameters: 1 id: 21 unit: public static void main() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/DumpToStringListener.java start line: 57 end line: 103 size: 41 LOC McCabe index: 5 number of parameters: 1 id: 22 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseForeachPartitionExample.scala start line: 36 end line: 86 size: 41 LOC McCabe index: 2 number of parameters: 1 id: 23 unit: public static void main() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseMapGetPutExample.java start line: 50 end line: 97 size: 40 LOC McCabe index: 4 number of parameters: 1 id: 24 unit: override def insert() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 195 end line: 236 size: 40 LOC McCabe index: 6 number of parameters: 2 id: 25 unit: public ReturnCode filterCell() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java start line: 112 end line: 162 size: 39 LOC McCabe index: 6 number of parameters: 1 id: 26 unit: public void batch() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaTableForBridge.java start line: 139 end line: 183 size: 38 LOC McCabe index: 2 number of parameters: 2 id: 27 unit: private def getNewHFileWriter() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 889 end line: 940 size: 37 LOC McCabe index: 3 number of parameters: 8 id: 28 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutTimestampExample.scala start line: 35 end line: 78 size: 37 LOC McCabe index: 2 number of parameters: 1 id: 29 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutExample.scala start line: 35 end line: 77 size: 37 LOC McCabe index: 2 number of parameters: 1 id: 30 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkPutExample.scala start line: 36 end line: 79 size: 36 LOC McCabe index: 2 number of parameters: 1 id: 31 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseStreamingBulkPutExample.scala start line: 36 end line: 76 size: 35 LOC McCabe index: 3 number of parameters: 1 id: 32 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutExampleFromFile.scala start line: 39 end line: 78 size: 35 LOC McCabe index: 2 number of parameters: 1 id: 33 unit: def createTable() file: 
spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 152 end line: 188 size: 34 LOC McCabe index: 3 number of parameters: 0 id: 34 unit: override def equals() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseConnectionCache.scala start line: 220 end line: 253 size: 34 LOC McCabe index: 15 number of parameters: 1 id: 35 unit: private def convertTypeToAvro[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SchemaConverters.scala start line: 238 end line: 274 size: 33 LOC McCabe index: 16 number of parameters: 4 id: 36 unit: private def convertFieldTypeToAvro[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SchemaConverters.scala start line: 281 end line: 317 size: 33 LOC McCabe index: 16 number of parameters: 4 id: 37 unit: def convert() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableCatalog.scala start line: 291 end line: 325 size: 33 LOC McCabe index: 4 number of parameters: 2 id: 38 unit: def and() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/Bound.scala start line: 52 end line: 94 size: 32 LOC McCabe index: 6 number of parameters: 2 id: 39 unit: override def compute() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 218 end line: 249 size: 32 LOC McCabe index: 3 number of parameters: 2 id: 40 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/HBaseSource.scala start line: 73 end line: 108 size: 31 LOC McCabe index: 2 number of parameters: 1 id: 41 unit: def generateSchemaMappingMap() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableCatalog.scala start line: 335 end line: 367 size: 30 LOC McCabe index: 5 number of parameters: 1 id: 42 unit: override def filter() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/NaiveEncoder.scala start line: 230 end line: 260 size: 29 LOC McCabe index: 7 number of parameters: 7 id: 43 unit: def hbaseFieldToScalaType() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/Utils.scala start line: 37 end line: 68 size: 29 LOC McCabe index: 14 number of parameters: 4 id: 44 unit: private def buildGets() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 109 end line: 137 size: 29 LOC McCabe index: 3 number of parameters: 5 id: 45 unit: public TableBuilder getTableBuilder() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 169 end line: 200 size: 28 LOC McCabe index: 2 number of parameters: 2 id: 46 unit: public static void main() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseStreamingBulkPutExample.java start line: 41 end line: 75 size: 27 LOC McCabe index: 2 number of parameters: 1 id: 47 unit: public static void main() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkLoadExample.java start line: 52 end line: 89 size: 27 LOC McCabe index: 2 number of parameters: 1 id: 48 unit: public static void main() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkPutExample.java start line: 42 end line: 75 size: 27 LOC McCabe index: 2 number of parameters: 
1 id: 49 unit: def get() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableCatalog.scala start line: 156 end line: 187 size: 27 LOC McCabe index: 7 number of parameters: 1 id: 50 unit: def incrementByteArray() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/Utils.scala start line: 94 end line: 123 size: 26 LOC McCabe index: 7 number of parameters: 1 id: 51 unit: private def toResultIterator() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 139 end line: 164 size: 26 LOC McCabe index: 5 number of parameters: 1 id: 52 unit: def apply() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableCatalog.scala start line: 242 end line: 268 size: 26 LOC McCabe index: 2 number of parameters: 2 id: 53 unit: def validate() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 762 end line: 791 size: 26 LOC McCabe index: 12 number of parameters: 3 id: 54 unit: def validate() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 1009 end line: 1038 size: 26 LOC McCabe index: 12 number of parameters: 3 id: 55 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkDeleteExample.scala start line: 35 end line: 65 size: 26 LOC McCabe index: 2 number of parameters: 1 id: 56 unit: public SparkSQLPushDownFilter() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java start line: 78 end line: 109 size: 25 LOC McCabe index: 3 number of parameters: 4 id: 57 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkDeleteExample.scala start line: 35 end line: 66 size: 25 LOC McCabe index: 2 number of parameters: 1 id: 58 unit: public static void main() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkDeleteExample.java start line: 42 end line: 73 size: 24 LOC McCabe index: 2 number of parameters: 1 id: 59 unit: public static void main() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkGetExample.java start line: 45 end line: 75 size: 24 LOC McCabe index: 2 number of parameters: 1 id: 60 unit: private def bulkMutation[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 310 end line: 334 size: 24 LOC McCabe index: 3 number of parameters: 3 id: 61 unit: private def closeHFileWriter() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 1088 end line: 1117 size: 24 LOC McCabe index: 3 number of parameters: 5 id: 62 unit: public String call() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkGetExample.java start line: 90 end line: 114 size: 23 LOC McCabe index: 3 number of parameters: 1 id: 63 unit: private def toResultIterator() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 188 end line: 210 size: 23 LOC McCabe index: 3 number of parameters: 1 id: 64 unit: def parseRowKey() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 251 end line: 276 size: 23 LOC McCabe index: 6 number of parameters: 2 id: 65 unit: def 
buildPushDownPredicatesResource() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 394 end line: 421 size: 23 LOC McCabe index: 3 number of parameters: 1 id: 66 unit: override def numPartitions: Int = if() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/BulkLoadPartitioner.scala start line: 37 end line: 62 size: 23 LOC McCabe index: 7 number of parameters: 1 id: 67 unit: public boolean equals() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java start line: 279 end line: 300 size: 22 LOC McCabe index: 8 number of parameters: 1 id: 68 unit: public static void setupZookeeperZnodes() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaProxy.java start line: 272 end line: 295 size: 21 LOC McCabe index: 3 number of parameters: 3 id: 69 unit: public static void main() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseDistributedScan.java start line: 44 end line: 72 size: 21 LOC McCabe index: 2 number of parameters: 1 id: 70 unit: private def buildScan() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 166 end line: 187 size: 21 LOC McCabe index: 6 number of parameters: 3 id: 71 unit: def mergeIntersect() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 819 end line: 841 size: 21 LOC McCabe index: 3 number of parameters: 1 id: 72 unit: static file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaProxy.java start line: 79 end line: 99 size: 20 LOC McCabe index: 1 number of parameters: 0 id: 73 unit: public void parseRule() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/TopicRoutingRules.java start line: 166 end line: 185 size: 20 LOC McCabe index: 5 number of parameters: 3 id: 74 unit: def toBytes() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/Utils.scala start line: 71 end line: 91 size: 20 LOC McCabe index: 13 number of parameters: 2 id: 75 unit: def addRange() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 47 end line: 71 size: 20 LOC McCabe index: 6 number of parameters: 1 id: 76 unit: def run() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 542 end line: 564 size: 20 LOC McCabe index: 4 number of parameters: 2 id: 77 unit: def main() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseDistributedScanExample.scala start line: 33 end line: 60 size: 20 LOC McCabe index: 2 number of parameters: 1 id: 78 unit: def performHousekeeping() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseConnectionCache.scala start line: 90 end line: 110 size: 20 LOC McCabe index: 6 number of parameters: 1 id: 79 unit: def serialize() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SchemaConverters.scala start line: 403 end line: 421 size: 19 LOC McCabe index: 10 number of parameters: 2 id: 80 unit: private def handleTimeSemantics() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 251 end line: 273 size: 19 LOC McCabe index: 11 number of parameters: 1 id: 81 unit: def getOverLapScanRange() file: 
spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 660 end line: 682 size: 19 LOC McCabe index: 4 number of parameters: 1 id: 82 unit: private void startKafkaConnection() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 94 end line: 113 size: 18 LOC McCabe index: 2 number of parameters: 0 id: 83 unit: private def getConf() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 488 end line: 507 size: 18 LOC McCabe index: 6 number of parameters: 1 id: 84 unit: def buildRow() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 278 end line: 297 size: 18 LOC McCabe index: 5 number of parameters: 2 id: 85 unit: private static void printUsageAndExit() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaProxy.java start line: 101 end line: 117 size: 17 LOC McCabe index: 1 number of parameters: 2 id: 86 unit: public void reloadIfFile() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/TopicRoutingRules.java start line: 102 end line: 124 size: 17 LOC McCabe index: 3 number of parameters: 0 id: 87 unit: private def convertStructToAvro[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SchemaConverters.scala start line: 127 end line: 144 size: 17 LOC McCabe index: 2 number of parameters: 3 id: 88 unit: def and() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/Bound.scala start line: 99 end line: 119 size: 17 LOC McCabe index: 4 number of parameters: 2 id: 89 unit: def hbaseRDD[U: ClassTag]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 428 end line: 448 size: 17 LOC McCabe index: 1 number of parameters: 4 id: 90 unit: private def rollWriters() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 1060 end line: 1077 size: 17 LOC McCabe index: 2 number of parameters: 6 id: 91 unit: def message() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/DataType.scala start line: 33 end line: 51 size: 17 LOC McCabe index: 4 number of parameters: 2 id: 92 unit: public void setQualifier() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/Rule.java start line: 112 end line: 128 size: 16 LOC McCabe index: 6 number of parameters: 1 id: 93 unit: private ProducerRecord toByteArray() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaTableForBridge.java start line: 122 end line: 136 size: 15 LOC McCabe index: 2 number of parameters: 3 id: 94 unit: public void parseRules() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/TopicRoutingRules.java start line: 144 end line: 158 size: 15 LOC McCabe index: 4 number of parameters: 3 id: 95 unit: public static boolean endsWith() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/Rule.java start line: 159 end line: 177 size: 15 LOC McCabe index: 5 number of parameters: 2 id: 96 unit: def bulkGet[T, U: ClassTag]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 366 end line: 382 size: 15 LOC McCabe index: 1 number of parameters: 4 id: 97 unit: private def hbaseMapPartition[K, U]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start 
line: 513 end line: 529 size: 15 LOC McCabe index: 2 number of parameters: 4 id: 98 unit: def apply() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/AvroSource.scala start line: 55 end line: 70 size: 15 LOC McCabe index: 1 number of parameters: 1 id: 99 unit: public boolean qualifierMatch() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/Rule.java start line: 60 end line: 74 size: 14 LOC McCabe index: 7 number of parameters: 1 id: 100 unit: public static boolean startsWith() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/Rule.java start line: 136 end line: 151 size: 14 LOC McCabe index: 5 number of parameters: 2 id: 101 unit: def init() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 55 end line: 68 size: 14 LOC McCabe index: 4 number of parameters: 0 id: 102 unit: def bulkLoad[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 308 end line: 321 size: 14 LOC McCabe index: 1 number of parameters: 10 id: 103 unit: def streamBulkGet[T, U: ClassTag]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 401 end line: 416 size: 14 LOC McCabe index: 1 number of parameters: 4 id: 104 unit: def mergeIntersect() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 600 end line: 616 size: 14 LOC McCabe index: 9 number of parameters: 1 id: 105 unit: def hbaseBulkLoadThinRows() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseRDDFunctions.scala start line: 238 end line: 251 size: 14 LOC McCabe index: 1 number of parameters: 3 id: 106 unit: override def equals() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ColumnFamilyQualifierMapKeyWrapper.scala start line: 43 end line: 57 size: 14 LOC McCabe index: 2 number of parameters: 1 id: 107 unit: def close() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseConnectionCache.scala start line: 75 end line: 88 size: 14 LOC McCabe index: 4 number of parameters: 0 id: 108 unit: def releaseOnException[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 77 end line: 89 size: 13 LOC McCabe index: 3 number of parameters: 1 id: 109 unit: def bulkGet[T, U]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 247 end line: 261 size: 13 LOC McCabe index: 1 number of parameters: 7 id: 110 unit: def bulkLoadThinRows[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 339 end line: 351 size: 13 LOC McCabe index: 1 number of parameters: 10 id: 111 unit: def bulkPut[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 217 end line: 230 size: 13 LOC McCabe index: 1 number of parameters: 3 id: 112 unit: private def hbaseForeachPartition[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 471 end line: 486 size: 13 LOC McCabe index: 2 number of parameters: 4 id: 113 unit: def mergeUnion() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 1046 end line: 1059 size: 13 LOC McCabe index: 3 number of parameters: 1 id: 114 unit: def hbaseBulkLoad() file: 
spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseRDDFunctions.scala start line: 193 end line: 205 size: 13 LOC McCabe index: 1 number of parameters: 3 id: 115 unit: public KafkaTableForBridge() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaTableForBridge.java start line: 78 end line: 89 size: 12 LOC McCabe index: 1 number of parameters: 5 id: 116 unit: public Pair call() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkLoadExample.java start line: 94 end line: 107 size: 12 LOC McCabe index: 3 number of parameters: 1 id: 117 unit: public boolean filterRow() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java start line: 166 end line: 178 size: 12 LOC McCabe index: 2 number of parameters: 0 id: 118 unit: def mergeUnion() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 623 end line: 638 size: 12 LOC McCabe index: 9 number of parameters: 1 id: 119 unit: def mergeUnion() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 799 end line: 811 size: 12 LOC McCabe index: 3 number of parameters: 1 id: 120 unit: def apply() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/HBaseSource.scala start line: 41 end line: 52 size: 12 LOC McCabe index: 1 number of parameters: 1 id: 121 unit: override def run() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseConnectionCache.scala start line: 50 end line: 63 size: 12 LOC McCabe index: 5 number of parameters: 0 id: 122 unit: def streamBulkGet[T, U]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 280 end line: 290 size: 11 LOC McCabe index: 1 number of parameters: 7 id: 123 unit: def apply() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/DataType.scala start line: 52 end line: 62 size: 11 LOC McCabe index: 1 number of parameters: 1 id: 124 unit: def hbaseBulkGet() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseRDDFunctions.scala start line: 102 end line: 112 size: 11 LOC McCabe index: 3 number of parameters: 4 id: 125 unit: def getConnection() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseConnectionCache.scala start line: 113 end line: 123 size: 11 LOC McCabe index: 2 number of parameters: 2 id: 126 unit: def getTable() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseConnectionCache.scala start line: 142 end line: 152 size: 11 LOC McCabe index: 2 number of parameters: 1 id: 127 unit: public SparkSQLPushDownFilter() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java start line: 67 end line: 76 size: 10 LOC McCabe index: 1 number of parameters: 5 id: 128 unit: override def destroy() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 102 end line: 111 size: 10 LOC McCabe index: 3 number of parameters: 0 id: 129 unit: override def destroy() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 142 end line: 151 size: 10 LOC McCabe index: 3 number of parameters: 0 id: 130 unit: def hbaseRDD[U]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 363 end line: 372 
size: 10 LOC McCabe index: 1 number of parameters: 4 id: 131 unit: override def createRelation() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 73 end line: 82 size: 10 LOC McCabe index: 1 number of parameters: 5 id: 132 unit: def mergeIntersect() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 914 end line: 923 size: 10 LOC McCabe index: 2 number of parameters: 1 id: 133 unit: override def execute() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 116 end line: 126 size: 10 LOC McCabe index: 2 number of parameters: 3 id: 134 unit: def getConnection() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseConnectionCache.scala start line: 125 end line: 136 size: 10 LOC McCabe index: 2 number of parameters: 1 id: 135 unit: public boolean isExclude() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/TopicRoutingRules.java start line: 194 end line: 202 size: 9 LOC McCabe index: 3 number of parameters: 3 id: 136 unit: public List getTopics() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/TopicRoutingRules.java start line: 211 end line: 220 size: 9 LOC McCabe index: 3 number of parameters: 3 id: 137 unit: def encode() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/JavaBytesEncoder.scala start line: 59 end line: 96 size: 9 LOC McCabe index: 1 number of parameters: 2 id: 138 unit: def create() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/JavaBytesEncoder.scala start line: 106 end line: 114 size: 9 LOC McCabe index: 3 number of parameters: 1 id: 139 unit: def getField() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableCatalog.scala start line: 136 end line: 147 size: 9 LOC McCabe index: 2 number of parameters: 1 id: 140 unit: private def streamBulkMutation[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 341 end line: 349 size: 9 LOC McCabe index: 1 number of parameters: 3 id: 141 unit: protected def initializeLogIfNecessary() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/Logging.scala start line: 96 end line: 104 size: 9 LOC McCabe index: 3 number of parameters: 1 id: 142 unit: def +=() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/FamiliesQualifiersValues.scala start line: 45 end line: 57 size: 9 LOC McCabe index: 2 number of parameters: 3 id: 143 unit: def execute() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 62 end line: 70 size: 9 LOC McCabe index: 2 number of parameters: 3 id: 144 unit: override def execute() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 136 end line: 145 size: 9 LOC McCabe index: 4 number of parameters: 3 id: 145 unit: public Put call() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseStreamingBulkPutExample.java start line: 81 end line: 89 size: 8 LOC McCabe index: 1 number of parameters: 1 id: 146 unit: def tryOrIOException() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SerializableConfiguration.scala start line: 39 end line: 46 size: 8 LOC McCabe index: 4 number of parameters: 1 id: 147 unit: def compare() file: 
spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/NaiveEncoder.scala start line: 156 end line: 163 size: 8 LOC McCabe index: 5 number of parameters: 2 id: 148 unit: def streamBulkDelete[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 222 end line: 229 size: 8 LOC McCabe index: 1 number of parameters: 5 id: 149 unit: def streamMapPartitions[T, U: ClassTag]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 196 end line: 203 size: 8 LOC McCabe index: 1 number of parameters: 3 id: 150 unit: def streamBulkPut[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 256 end line: 263 size: 8 LOC McCabe index: 1 number of parameters: 3 id: 151 unit: def containsPoint() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 716 end line: 725 size: 8 LOC McCabe index: 8 number of parameters: 1 id: 152 unit: def mergeUnion() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 885 end line: 892 size: 8 LOC McCabe index: 2 number of parameters: 2 id: 153 unit: def populateLatestExecutionRules() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 969 end line: 976 size: 8 LOC McCabe index: 2 number of parameters: 2 id: 154 unit: override def equals() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ByteArrayComparable.scala start line: 41 end line: 48 size: 8 LOC McCabe index: 3 number of parameters: 1 id: 155 unit: override def equals() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ByteArrayWrapper.scala start line: 36 end line: 43 size: 8 LOC McCabe index: 3 number of parameters: 1 id: 156 unit: override def compareTo() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/KeyFamilyQualifier.scala start line: 37 end line: 44 size: 8 LOC McCabe index: 3 number of parameters: 1 id: 157 unit: public KafkaBridgeConnection() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 65 end line: 71 size: 7 LOC McCabe index: 1 number of parameters: 3 id: 158 unit: private void setupRules() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 86 end line: 92 size: 7 LOC McCabe index: 1 number of parameters: 0 id: 159 unit: private static void printUsageAndExit() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/DumpToStringListener.java start line: 105 end line: 111 size: 7 LOC McCabe index: 1 number of parameters: 2 id: 160 unit: public void parseRules() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/TopicRoutingRules.java start line: 130 end line: 136 size: 7 LOC McCabe index: 1 number of parameters: 1 id: 161 unit: public Put call() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkPutExample.java start line: 81 end line: 88 size: 7 LOC McCabe index: 1 number of parameters: 1 id: 162 unit: private def getSchemaBuilder() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SchemaConverters.scala start line: 319 end line: 325 size: 7 LOC McCabe index: 2 number of parameters: 1 id: 163 unit: def deserialize() file: 
spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SchemaConverters.scala start line: 423 end line: 429 size: 7 LOC McCabe index: 1 number of parameters: 2 id: 164 unit: def apply() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/Bound.scala start line: 38 end line: 44 size: 7 LOC McCabe index: 2 number of parameters: 1 id: 165 unit: def release() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 312 end line: 318 size: 7 LOC McCabe index: 1 number of parameters: 1 id: 166 unit: def toDataType = StructType() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableCatalog.scala start line: 148 end line: 154 size: 7 LOC McCabe index: 1 number of parameters: 1 id: 167 unit: def foreachPartition[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 63 end line: 70 size: 7 LOC McCabe index: 1 number of parameters: 3 id: 168 unit: def mapPartitions[T, R]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 109 end line: 115 size: 7 LOC McCabe index: 1 number of parameters: 3 id: 169 unit: def streamMap[T, U]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 137 end line: 143 size: 7 LOC McCabe index: 1 number of parameters: 3 id: 170 unit: def streamBulkPut[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 179 end line: 185 size: 7 LOC McCabe index: 1 number of parameters: 4 id: 171 unit: def applyCreds[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 232 end line: 240 size: 7 LOC McCabe index: 2 number of parameters: 0 id: 172 unit: def hbaseRDD() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 459 end line: 466 size: 7 LOC McCabe index: 1 number of parameters: 2 id: 173 unit: def hbaseBulkGet() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseDStreamFunctions.scala start line: 99 end line: 105 size: 7 LOC McCabe index: 1 number of parameters: 4 id: 174 unit: def execute() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 38 end line: 44 size: 7 LOC McCabe index: 1 number of parameters: 3 id: 175 unit: override def appendToExpression() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 84 end line: 90 size: 7 LOC McCabe index: 1 number of parameters: 1 id: 176 unit: override def appendToExpression() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 103 end line: 109 size: 7 LOC McCabe index: 1 number of parameters: 1 id: 177 unit: public KafkaBridgeConnection() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 79 end line: 84 size: 6 LOC McCabe index: 1 number of parameters: 3 id: 178 unit: public void close() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 156 end line: 161 size: 6 LOC McCabe index: 2 number of parameters: 0 id: 179 unit: private boolean keep() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaTableForBridge.java start line: 110 end line: 115 
size: 6 LOC McCabe index: 2 number of parameters: 1 id: 180 unit: public boolean match() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/Rule.java start line: 47 end line: 53 size: 6 LOC McCabe index: 3 number of parameters: 3 id: 181 unit: public boolean columnFamilyMatch() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/Rule.java start line: 81 end line: 86 size: 6 LOC McCabe index: 2 number of parameters: 1 id: 182 unit: public boolean tableMatch() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/Rule.java start line: 93 end line: 98 size: 6 LOC McCabe index: 2 number of parameters: 1 id: 183 unit: def release() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 70 end line: 75 size: 6 LOC McCabe index: 2 number of parameters: 0 id: 184 unit: def foreachPartition[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 119 end line: 124 size: 6 LOC McCabe index: 1 number of parameters: 3 id: 185 unit: def mapPartitions[T, R: ClassTag]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 141 end line: 148 size: 6 LOC McCabe index: 1 number of parameters: 3 id: 186 unit: def streamBulkDelete[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 298 end line: 303 size: 6 LOC McCabe index: 1 number of parameters: 3 id: 187 unit: def compareRange() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 705 end line: 710 size: 6 LOC McCabe index: 7 number of parameters: 2 id: 188 unit: def hbaseBulkGet[R: ClassTag]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseDStreamFunctions.scala start line: 78 end line: 83 size: 6 LOC McCabe index: 1 number of parameters: 4 id: 189 unit: def appendToExpression() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 47 end line: 54 size: 6 LOC McCabe index: 1 number of parameters: 1 id: 190 unit: override def execute() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 77 end line: 82 size: 6 LOC McCabe index: 2 number of parameters: 3 id: 191 unit: override def execute() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 97 end line: 102 size: 6 LOC McCabe index: 2 number of parameters: 3 id: 192 unit: override def execute() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 154 end line: 160 size: 6 LOC McCabe index: 1 number of parameters: 3 id: 193 unit: override def execute() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 209 end line: 219 size: 6 LOC McCabe index: 1 number of parameters: 3 id: 194 unit: def serialize() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SerDes.scala start line: 26 end line: 32 size: 5 LOC McCabe index: 1 number of parameters: 1 id: 195 unit: override def getPreferredLocations() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 103 end line: 107 size: 5 LOC McCabe index: 1 number of parameters: 1 id: 196 unit: override def equals() file: 
spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableCatalog.scala start line: 103 end line: 107 size: 5 LOC McCabe index: 5 number of parameters: 1 id: 197 unit: def foreachPartition[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 85 end line: 89 size: 5 LOC McCabe index: 1 number of parameters: 3 id: 198 unit: def bulkPut[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 158 end line: 163 size: 5 LOC McCabe index: 1 number of parameters: 3 id: 199 unit: def hbaseRDD() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 382 end line: 386 size: 5 LOC McCabe index: 1 number of parameters: 2 id: 200 unit: def foreachPartition[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 100 end line: 104 size: 5 LOC McCabe index: 1 number of parameters: 3 id: 201 unit: override def createRelation() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 66 end line: 70 size: 5 LOC McCabe index: 1 number of parameters: 3 id: 202 unit: def mergeUnion() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 901 end line: 905 size: 5 LOC McCabe index: 1 number of parameters: 1 id: 203 unit: override def initialValue() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 940 end line: 944 size: 5 LOC McCabe index: 1 number of parameters: 0 id: 204 unit: private def initializeLogging() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/Logging.scala start line: 106 end line: 115 size: 5 LOC McCabe index: 1 number of parameters: 1 id: 205 unit: def hbaseBulkPut() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseRDDFunctions.scala start line: 57 end line: 61 size: 5 LOC McCabe index: 1 number of parameters: 3 id: 206 unit: def hbaseBulkGet[R: ClassTag]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseRDDFunctions.scala start line: 82 end line: 86 size: 5 LOC McCabe index: 1 number of parameters: 4 id: 207 unit: def hbaseMapPartitions[R: ClassTag]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseRDDFunctions.scala start line: 160 end line: 164 size: 5 LOC McCabe index: 1 number of parameters: 3 id: 208 unit: def hbaseBulkPut() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseDStreamFunctions.scala start line: 53 end line: 57 size: 5 LOC McCabe index: 1 number of parameters: 3 id: 209 unit: def hbaseBulkDelete() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseDStreamFunctions.scala start line: 118 end line: 122 size: 5 LOC McCabe index: 1 number of parameters: 3 id: 210 unit: def hbaseMapPartitions[R: ClassTag]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseDStreamFunctions.scala start line: 154 end line: 158 size: 5 LOC McCabe index: 1 number of parameters: 3 id: 211 unit: def cloneColumnFamily() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ColumnFamilyQualifierMapKeyWrapper.scala start line: 64 end line: 68 size: 5 LOC McCabe index: 1 number of parameters: 0 id: 212 unit: def cloneQualifier() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ColumnFamilyQualifierMapKeyWrapper.scala start line: 70 end 
line: 74 size: 5 LOC McCabe index: 1 number of parameters: 0 id: 213 unit: private CheckMutation addTopics() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaTableForBridge.java start line: 117 end line: 120 size: 4 LOC McCabe index: 1 number of parameters: 1 id: 214 unit: public TopicRoutingRules() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/TopicRoutingRules.java start line: 93 end line: 96 size: 4 LOC McCabe index: 1 number of parameters: 1 id: 215 unit: public int hashCode() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java start line: 303 end line: 306 size: 4 LOC McCabe index: 1 number of parameters: 0 id: 216 unit: private def writeObject() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SerializableConfiguration.scala start line: 29 end line: 32 size: 4 LOC McCabe index: 1 number of parameters: 1 id: 217 unit: private def readObject() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SerializableConfiguration.scala start line: 34 end line: 37 size: 4 LOC McCabe index: 1 number of parameters: 1 id: 218 unit: def release() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 34 end line: 38 size: 4 LOC McCabe index: 2 number of parameters: 0 id: 219 unit: def release() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 39 end line: 42 size: 4 LOC McCabe index: 1 number of parameters: 0 id: 220 unit: override def init() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 97 end line: 100 size: 4 LOC McCabe index: 1 number of parameters: 0 id: 221 unit: override def init() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 137 end line: 140 size: 4 LOC McCabe index: 1 number of parameters: 0 id: 222 unit: override def serialize() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/SerDes.scala start line: 33 end line: 36 size: 4 LOC McCabe index: 1 number of parameters: 1 id: 223 unit: def parse() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/DataTypeParserWrapper.scala start line: 26 end line: 30 size: 4 LOC McCabe index: 1 number of parameters: 1 id: 224 unit: def bulkDelete[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/JavaHBaseContext.scala start line: 202 end line: 205 size: 4 LOC McCabe index: 1 number of parameters: 5 id: 225 unit: def streamForeachPartition[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 170 end line: 174 size: 4 LOC McCabe index: 1 number of parameters: 3 id: 226 unit: def bulkDelete[T]() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala start line: 278 end line: 281 size: 4 LOC McCabe index: 1 number of parameters: 3 id: 227 unit: def getFreshByteRange() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 951 end line: 954 size: 4 LOC McCabe index: 1 number of parameters: 3 id: 228 unit: def hbaseBulkDelete() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseRDDFunctions.scala start line: 125 end line: 128 size: 4 LOC McCabe index: 1 number of parameters: 3 id: 229 unit: def 
hbaseForeachPartition() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseRDDFunctions.scala start line: 141 end line: 144 size: 4 LOC McCabe index: 1 number of parameters: 3 id: 230 unit: override def compute() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/NewHBaseRDD.scala start line: 34 end line: 37 size: 4 LOC McCabe index: 1 number of parameters: 2 id: 231 unit: def hbaseForeachPartition() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseDStreamFunctions.scala start line: 135 end line: 138 size: 4 LOC McCabe index: 1 number of parameters: 3 id: 232 unit: override def hashCode() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ColumnFamilyQualifierMapKeyWrapper.scala start line: 59 end line: 62 size: 4 LOC McCabe index: 1 number of parameters: 0 id: 233 unit: override def appendToExpression() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 127 end line: 130 size: 4 LOC McCabe index: 2 number of parameters: 1 id: 234 unit: override def appendToExpression() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 161 end line: 164 size: 4 LOC McCabe index: 2 number of parameters: 1 id: 235 unit: def build() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 224 end line: 228 size: 4 LOC McCabe index: 1 number of parameters: 2 id: 236 unit: public boolean isAborted() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 121 end line: 123 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 237 unit: public Configuration getConfiguration() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 126 end line: 128 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 238 unit: public BufferedMutator getBufferedMutator() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 131 end line: 133 size: 3 LOC McCabe index: 1 number of parameters: 1 id: 239 unit: public BufferedMutator getBufferedMutator() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 136 end line: 138 size: 3 LOC McCabe index: 1 number of parameters: 1 id: 240 unit: public RegionLocator getRegionLocator() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 141 end line: 143 size: 3 LOC McCabe index: 1 number of parameters: 1 id: 241 unit: public Admin getAdmin() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 151 end line: 153 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 242 unit: public boolean isClosed() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 164 end line: 166 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 243 unit: public RegionLocator getRegionLocator() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaTableForBridge.java start line: 74 end line: 76 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 244 unit: public void close() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaTableForBridge.java start line: 186 end line: 188 size: 3 LOC McCabe 
index: 1 number of parameters: 0 id: 245 unit: public TableName getName() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaTableForBridge.java start line: 191 end line: 193 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 246 unit: public Configuration getConfiguration() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaTableForBridge.java start line: 196 end line: 198 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 247 unit: public HTableDescriptor getTableDescriptor() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaTableForBridge.java start line: 201 end line: 203 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 248 unit: public TableDescriptor getDescriptor() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaTableForBridge.java start line: 206 end line: 208 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 249 unit: public List getDropRules() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/TopicRoutingRules.java start line: 226 end line: 228 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 250 unit: public List getRouteRules() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/TopicRoutingRules.java start line: 234 end line: 236 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 251 unit: public TopicRule() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/TopicRule.java start line: 34 end line: 36 size: 3 LOC McCabe index: 1 number of parameters: 1 id: 252 unit: public Set getTopics() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/TopicRule.java start line: 38 end line: 40 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 253 unit: public void setColumnFamily() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/Rule.java start line: 104 end line: 106 size: 3 LOC McCabe index: 1 number of parameters: 1 id: 254 unit: public TableName getTableName() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/Rule.java start line: 183 end line: 185 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 255 unit: public void setTableName() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/Rule.java start line: 191 end line: 193 size: 3 LOC McCabe index: 1 number of parameters: 1 id: 256 unit: public boolean isQualifierEndsWith() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/Rule.java start line: 216 end line: 218 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 257 unit: public boolean isQualifierStartsWith() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/Rule.java start line: 224 end line: 227 size: 3 LOC McCabe index: 1 number of parameters: 0 id: 258 unit: public Delete call() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkDeleteExample.java start line: 77 end line: 79 size: 3 LOC McCabe index: 1 number of parameters: 1 id: 259 unit: public Get call() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkGetExample.java start line: 81 end line: 83 size: 3 LOC McCabe index: 1 number of parameters: 1 id: 260 unit: public Get call() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseMapGetPutExample.java start line: 101 end line: 103 size: 3 LOC McCabe index: 1 
number of parameters: 1
id: 261 unit: public String call() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseDistributedScan.java start line: 77 end line: 79 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 262 unit: def release() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 47 end line: 49 size: 3 LOC McCabe index: 1 number of parameters: 0
id: 263 unit: def getScanner() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 113 end line: 115 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 264 unit: def get() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 117 end line: 119 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 265 unit: implicit def ScanResToScan() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 156 end line: 158 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 266 unit: implicit def GetResToResult() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 160 end line: 162 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 267 unit: implicit def TableResToTable() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 164 end line: 166 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 268 unit: implicit def RegionResToRegions() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseResources.scala start line: 168 end line: 170 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 269 unit: def compare() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/package.scala start line: 32 end line: 34 size: 3 LOC McCabe index: 1 number of parameters: 2
id: 270 unit: def addPoint() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 43 end line: 45 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 271 unit: override def run() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 96 end line: 98 size: 3 LOC McCabe index: 1 number of parameters: 0
id: 272 unit: private def close() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 214 end line: 216 size: 3 LOC McCabe index: 1 number of parameters: 0
id: 273 unit: def toSerializedTypedFilter() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 280 end line: 282 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 274 unit: def fromSerializedFilter() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 284 end line: 286 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 275 unit: def addResource() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 306 end line: 308 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 276 unit: def release() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala start line: 309 end line: 311 size: 3 LOC McCabe index: 1 number of parameters: 0
id: 277 unit: def toFields = map.map { case() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableCatalog.scala start line: 130 end line: 132 size: 3 LOC McCabe index: 2 number of parameters: 2
id: 278 unit: def getIndexedProjections() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 238 end line: 240 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 279 unit: def hasOverlap() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 693 end line: 695 size: 3 LOC McCabe index: 1 number of parameters: 2
id: 280 unit: def clear() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 874 end line: 876 size: 3 LOC McCabe index: 1 number of parameters: 0
id: 281 unit: def getFreshByteRange() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala start line: 947 end line: 949 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 282 unit: protected def logInfo() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/Logging.scala start line: 55 end line: 57 size: 3 LOC McCabe index: 2 number of parameters: 1
id: 283 unit: protected def logDebug() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/Logging.scala start line: 59 end line: 61 size: 3 LOC McCabe index: 2 number of parameters: 1
id: 284 unit: protected def logTrace() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/Logging.scala start line: 63 end line: 65 size: 3 LOC McCabe index: 2 number of parameters: 1
id: 285 unit: protected def logWarning() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/Logging.scala start line: 67 end line: 69 size: 3 LOC McCabe index: 2 number of parameters: 1
id: 286 unit: protected def logError() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/Logging.scala start line: 71 end line: 73 size: 3 LOC McCabe index: 2 number of parameters: 1
id: 287 unit: protected def logInfo() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/Logging.scala start line: 76 end line: 78 size: 3 LOC McCabe index: 2 number of parameters: 2
id: 288 unit: protected def logDebug() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/Logging.scala start line: 80 end line: 82 size: 3 LOC McCabe index: 2 number of parameters: 2
id: 289 unit: protected def logTrace() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/Logging.scala start line: 84 end line: 86 size: 3 LOC McCabe index: 2 number of parameters: 2
id: 290 unit: protected def logWarning() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/Logging.scala start line: 88 end line: 90 size: 3 LOC McCabe index: 2 number of parameters: 2
id: 291 unit: protected def logError() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/Logging.scala start line: 92 end line: 94 size: 3 LOC McCabe index: 2 number of parameters: 2
id: 292 unit: override def compareTo() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ByteArrayComparable.scala start line: 33 end line: 35 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 293 unit: override def hashCode() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ByteArrayComparable.scala start line: 37 end line: 39 size: 3 LOC McCabe index: 1 number of parameters: 0
id: 294 unit: def add() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/FamiliesQualifiersValues.scala start line: 65 end line: 67 size: 3 LOC McCabe index: 1 number of parameters: 3
id: 295 unit: override def appendToExpression() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 146 end line: 148 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 296 unit: override def appendToExpression() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 172 end line: 174 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 297 unit: override def appendToExpression() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 182 end line: 184 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 298 unit: override def appendToExpression() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 192 end line: 194 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 299 unit: override def appendToExpression() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpression.scala start line: 202 end line: 204 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 300 unit: override def compareTo() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ByteArrayWrapper.scala start line: 33 end line: 35 size: 3 LOC McCabe index: 1 number of parameters: 1
id: 301 unit: override def hashCode() file: spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ByteArrayWrapper.scala start line: 44 end line: 46 size: 3 LOC McCabe index: 1 number of parameters: 0
id: 302 unit: private KafkaProxy() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaProxy.java start line: 122 end line: 124 size: 2 LOC McCabe index: 1 number of parameters: 0
id: 303 unit: public void clearRegionLocationCache() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 147 end line: 148 size: 2 LOC McCabe index: 1 number of parameters: 0
id: 304 unit: private DumpToStringListener() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/DumpToStringListener.java start line: 54 end line: 55 size: 2 LOC McCabe index: 1 number of parameters: 0
id: 305 unit: public TopicRoutingRules() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/TopicRoutingRules.java start line: 84 end line: 86 size: 2 LOC McCabe index: 1 number of parameters: 0
id: 306 unit: public DropRule() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/DropRule.java start line: 27 end line: 28 size: 2 LOC McCabe index: 1 number of parameters: 0
id: 307 unit: public void abort() file: kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaBridgeConnection.java start line: 118 end line: 118 size: 1 LOC McCabe index: 1 number of parameters: 2
id: 308 unit: private JavaHBaseBulkDeleteExample() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkDeleteExample.java start line: 40 end line: 40 size: 1 LOC McCabe index: 1 number of parameters: 0
id: 309 unit: private JavaHBaseStreamingBulkPutExample() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseStreamingBulkPutExample.java start line: 39 end line: 39 size: 1 LOC McCabe index: 1 number of parameters: 0
id: 310 unit: private JavaHBaseBulkLoadExample() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkLoadExample.java start line: 50 end line: 50 size: 1 LOC McCabe index: 1 number of parameters: 0
id: 311 unit: private JavaHBaseBulkGetExample() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkGetExample.java start line: 43 end line: 43 size: 1 LOC McCabe index: 1 number of parameters: 0
id: 312 unit: private JavaHBaseBulkPutExample() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkPutExample.java start line: 40 end line: 40 size: 1 LOC McCabe index: 1 number of parameters: 0
id: 313 unit: private JavaHBaseMapGetPutExample() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseMapGetPutExample.java start line: 48 end line: 48 size: 1 LOC McCabe index: 1 number of parameters: 0
id: 314 unit: private JavaHBaseDistributedScan() file: spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseDistributedScan.java start line: 42 end line: 42 size: 1 LOC McCabe index: 1 number of parameters: 0
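Note on the tail of this inventory: the 3-LOC units with a McCabe index of 2 listed for Logging.scala (ids 282-291) are consistent with guarded log wrappers, where a single `if (log.isXxxEnabled)` check is the only branch and raises the cyclomatic complexity from 1 to 2, while the 1-2 LOC, McCabe-1 constructors at the bottom of the list are empty private constructors that simply prevent instantiation of the example classes. A minimal sketch of the log-wrapper shape, assuming an SLF4J-backed trait modeled on Spark's Logging pattern (the field names and guard are assumptions, not the project's verbatim source):

```scala
import org.slf4j.{Logger, LoggerFactory}

// Hypothetical sketch of the guarded log helpers reported above:
// one `if` per method accounts for size = 3 LOC and McCabe index = 2.
trait Logging {
  @transient private lazy val log: Logger =
    LoggerFactory.getLogger(getClass.getName.stripSuffix("$"))

  // One-parameter variant (ids 282-286 in the listing).
  protected def logInfo(msg: => String): Unit = {
    if (log.isInfoEnabled) log.info(msg)
  }

  // Two-parameter variant carrying a Throwable (ids 287-291).
  protected def logInfo(msg: => String, throwable: Throwable): Unit = {
    if (log.isInfoEnabled) log.info(msg, throwable)
  }
}
```

The by-name `msg: => String` parameter means the message string is only built when the corresponding log level is enabled, which is the usual reason these wrappers exist at all despite their tiny size.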