# save_data()
# From: ambari-infra-solr-client/src/main/python/solrDataManager.py

def save_data(mode, solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field, id_field,
              range_end, read_block_size, write_block_size, working_dir, additional_filter, name, json_file,
              compression, hdfs_user, hdfs_path, key_file_path, bucket, key_prefix, local_path, solr_output_collection,
              solr_output_url, exclude_fields, skip_date_usage):
  """Export documents from a Solr collection in blocks and ship each block out.

  Builds a sorted Solr /select query (optionally bounded by filter_field up to
  range_end and/or narrowed by additional_filter), then repeatedly pulls up to
  read_block_size rows at a time via create_block(), staging them in a temp
  JSON file that upload_block() delivers to the configured destination
  (HDFS / S3 / local path / another Solr collection, depending on mode).
  """
  logger.info("Starting to save data")

  # Each lot of documents is staged in this temp file inside the working dir.
  tmp_file_path = "{0}/tmp.json".format(working_dir)

  # Paging cursor: the (filter value, id) of the last doc of the previous lot.
  prev_lot_end_value = None
  prev_lot_end_id = None

  safe_chars = "/+\"*"
  if skip_date_usage:
    # No date window: match everything (plus any extra filter), page by id only.
    raw_query = "*:*+AND+{0}".format(additional_filter) if additional_filter else "*:*"
    raw_sort = "{0}+asc".format(id_field)
  else:
    # Bound the export by filter_field up to range_end; page by (filter_field, id).
    range_clause = "{0}:[*+TO+\"{1}\"]".format(filter_field, range_end)
    raw_query = "{0}+AND+{1}".format(additional_filter, range_clause) if additional_filter else range_clause
    raw_sort = "{0}+asc,{1}+asc".format(filter_field, id_field)
  q = quote(raw_query, safe=safe_chars)
  sort = quote(raw_sort, safe=safe_chars)

  solr_query_url_prefix = "{0}/{1}/select?q={2}&sort={3}&rows={4}&wt=json".format(solr_url, collection, q, sort, read_block_size)

  # When copying into another Solr collection, _version_ must be dropped unless
  # the caller supplied an explicit exclusion list of their own.
  exclude_field_list = exclude_fields.split(',') if exclude_fields else None
  if solr_output_collection and not exclude_field_list:
    exclude_field_list = ['_version_']

  total_records = 0
  done = False
  while not done:
    # create_block reports (done, record_count, last_filter_value, last_id).
    results = create_block(tmp_file_path, solr_kinit_command, curl_prefix, solr_query_url_prefix, filter_field,
                           id_field, range_end, write_block_size, prev_lot_end_value, prev_lot_end_id, json_file,
                           exclude_field_list, skip_date_usage)
    done, records, prev_lot_end_value, prev_lot_end_id = results[:4]

    if records > 0:
      upload_block(mode, solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field,
                   id_field, working_dir, tmp_file_path, name, prev_lot_end_value, prev_lot_end_id, hdfs_user,
                   hdfs_path, key_file_path, bucket, key_prefix, local_path, compression, solr_output_collection,
                   solr_output_url, skip_date_usage)
      total_records += records
      logger.info("A total of %d records are saved", total_records)