_hash_to_scan

in linkis-engineconn-plugins/hbase/hbase-shims-2.5.3/src/main/resources/hbase-ruby/hbase/table.rb [509:596]
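
Converts the options hash accepted by the shell's scan command into an org.apache.hadoop.hbase.client.Scan. With no arguments it falls through to a default, full-table Scan.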

    def _hash_to_scan(args)
      if args.any?
        enablemetrics = args['ALL_METRICS'].nil? ? false : args['ALL_METRICS']
        enablemetrics ||= !args['METRICS'].nil?
        filter = args['FILTER']
        startrow = args['STARTROW'] || ''
        stoprow = args['STOPROW']
        rowprefixfilter = args['ROWPREFIXFILTER']
        timestamp = args['TIMESTAMP']
        columns = args['COLUMNS'] || args['COLUMN'] || []
        
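        # CACHE_BLOCKS defaults to true when the caller does not set it.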
        cache_blocks = args['CACHE_BLOCKS'].nil? ? true : args['CACHE_BLOCKS']
        cache = args['CACHE'] || 0
        reversed = args['REVERSED'] || false
        versions = args['VERSIONS'] || 1
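        # Bare keys below (TIMERANGE, ATTRIBUTES, AUTHORIZATIONS, ...) are module
        # constants from the shell's hbase_constants.rb that resolve to the
        # same-named strings.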
        timerange = args[TIMERANGE]
        raw = args['RAW'] || false
        attributes = args[ATTRIBUTES]
        authorizations = args[AUTHORIZATIONS]
        consistency = args[CONSISTENCY]
        
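        # A single column passed as a String is wrapped into a one-element Array.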
        columns = [columns] if columns.class == String
        limit = args['LIMIT'] || -1
        replica_id = args[REGION_REPLICA_ID]
        isolation_level = args[ISOLATION_LEVEL]
        read_type = args[READ_TYPE]
        allow_partial_results = args[ALLOW_PARTIAL_RESULTS].nil? ? false : args[ALLOW_PARTIAL_RESULTS]
        batch = args[BATCH] || -1
        max_result_size = args[MAX_RESULT_SIZE] || -1

        unless columns.is_a?(Array)
          raise ArgumentError, 'COLUMNS must be specified as a String or an Array'
        end

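        # Pick the two-argument constructor only when a stop row was supplied.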
        scan = if stoprow
                 org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes, stoprow.to_java_bytes)
               else
                 org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes)
               end
        
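        # A ROWPREFIXFILTER overrides any STARTROW/STOPROW set just above.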
        scan.setStartStopRowForPrefixScan(rowprefixfilter.to_java_bytes) if rowprefixfilter
        
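        # Clear column converters cached by parse_column_name during the last scan.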
        @converters.clear

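        # parse_column_name splits 'family:qualifier' (recording any converter);
        # a bare family name scans the whole column family.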
        columns.each do |c|
          family, qualifier = parse_column_name(c.to_s)
          if qualifier
            scan.addColumn(family, qualifier)
          else
            scan.addFamily(family)
          end
        end

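        # A String FILTER is run through ParseFilter; anything else is assumed
        # to already be a Filter instance (nil simply clears the filter).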
        if filter.class == String
          scan.setFilter(
            org.apache.hadoop.hbase.filter.ParseFilter.new.parseFilterString(filter.to_java_bytes)
          )
        else
          scan.setFilter(filter)
        end

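        # Apply the remaining options; the guarded setters fire only when a
        # non-default value was supplied.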
        scan.setScanMetricsEnabled(enablemetrics) if enablemetrics
        scan.setTimestamp(timestamp) if timestamp
        scan.setCacheBlocks(cache_blocks)
        scan.setReversed(reversed)
        scan.setCaching(cache) if cache > 0
        scan.setMaxVersions(versions) if versions > 1
        scan.setTimeRange(timerange[0], timerange[1]) if timerange
        scan.setRaw(raw)
        scan.setLimit(limit) if limit > 0
        set_attributes(scan, attributes) if attributes
        set_authorizations(scan, authorizations) if authorizations
        scan.setConsistency(org.apache.hadoop.hbase.client.Consistency.valueOf(consistency)) if consistency
        scan.setReplicaId(replica_id) if replica_id
        scan.setIsolationLevel(org.apache.hadoop.hbase.client.IsolationLevel.valueOf(isolation_level)) if isolation_level
        scan.setReadType(org.apache.hadoop.hbase.client::Scan::ReadType.valueOf(read_type)) if read_type
        scan.setAllowPartialResults(allow_partial_results) if allow_partial_results
        scan.setBatch(batch) if batch > 0
        scan.setMaxResultSize(max_result_size) if max_result_size > 0
      else
        scan = org.apache.hadoop.hbase.client.Scan.new
      end

      scan
    end
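
A minimal usage sketch, with hypothetical row keys, columns, and filter string (t stands in for an Hbase::Table wrapper): the hash uses the same uppercase keys the shell's scan command accepts.

    # Scan rows row-0001 (inclusive) to row-0100 (exclusive), fetching one
    # qualified column and one whole family, capped at 10 rows.
    scan = t._hash_to_scan(
      'STARTROW' => 'row-0001',
      'STOPROW'  => 'row-0100',
      'COLUMNS'  => %w[cf1:q1 cf2],
      'FILTER'   => "ValueFilter(=, 'binary:foo')",
      'LIMIT'    => 10
    )
    # The result is an org.apache.hadoop.hbase.client.Scan, ready to pass to
    # the underlying Java table's getScanner.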