in ambari-infra-solr-client/src/main/python/solrDataManager.py [0:0]
def create_command_file(mode, upload, working_dir, upload_file_path, solr_url, collection, filter_field, id_field,
                        prev_lot_end_value, prev_lot_end_id, hdfs_user, hdfs_path, key_file_path, bucket, key_prefix,
                        local_path, solr_output_collection, solr_output_url, skip_date_usage):
  """
  Build the upload and delete commands for the current lot and persist them to
  <working_dir>/command.json so an interrupted run can be resumed later.

  The upload destination is chosen by the first configured option, in this order:
  solr_output_collection (solr), hdfs_path (hdfs), key_file_path (s3), local_path (local).
  If upload is requested but none is configured, the process exits.

  In "save" mode nothing is written to disk; the relevant command string is just
  returned (the upload command when upload is True, otherwise the delete command).

  Returns the upload command when upload is True, otherwise the delete command.
  """
  commands = {}

  if upload:
    logger.debug("Creating command file with upload and delete instructions in case of an interruption")
  else:
    logger.debug("Creating command file with delete instructions in case of an interruption")

  if upload:
    # Pick the upload destination: the first configured option wins.
    if solr_output_collection:
      # Fall back to the source Solr url when no dedicated output url is given.
      command_url = solr_output_url if solr_output_url else solr_url
      upload_command = "{0}/{1}/update/json/docs?commit=true&wt=json --data-binary @{2}"\
        .format(command_url, solr_output_collection, upload_file_path)
      commands["upload"] = {
        "type": "solr",
        "command": upload_command,
        "upload_file_path": upload_file_path,
        "solr_output_collection": solr_output_collection
      }
    elif hdfs_path:
      upload_command = "sudo -u {0} hadoop fs -put {1} {2}".format(hdfs_user, upload_file_path, hdfs_path)
      commands["upload"] = {
        "type": "hdfs",
        "command": upload_command,
        "upload_file_path": upload_file_path,
        "hdfs_path": hdfs_path,
        "hdfs_user": hdfs_user
      }
    elif key_file_path:
      upload_command = "java -cp {0}/libs/* org.apache.ambari.infra.solr.S3Uploader {1} {2} {3} {4}".format( \
        os.path.dirname(os.path.realpath(__file__)), key_file_path, bucket, key_prefix, upload_file_path)
      commands["upload"] = {
        "type": "s3",
        "command": upload_command,
        "upload_file_path": upload_file_path,
        "bucket": bucket,
        "key_prefix": key_prefix
      }
    elif local_path:
      upload_command = "mv {0} {1}".format(upload_file_path, local_path)
      commands["upload"] = {
        "type": "local",
        "command": upload_command,
        "upload_file_path": upload_file_path,
        "local_path": local_path
      }
    else:
      # logger.warn is deprecated; logger.warning is the supported spelling.
      logger.warning("Unknown upload destination")
      sys.exit()

    if mode == "save":
      return upload_command

  # Build the Solr delete query covering everything exported in this lot.
  if skip_date_usage:
    delete_query = "({0}:[*+TO+\"{1}\"])".format(id_field, prev_lot_end_id)
  else:
    # Delete all docs strictly before the last filter value, plus the docs sharing
    # the last filter value up to (and including) the last exported id.
    delete_prev = "{0}:[*+TO+\"{1}\"]".format(filter_field, prev_lot_end_value)
    delete_last = "({0}:\"{1}\"+AND+{2}:[*+TO+\"{3}\"])".format(filter_field, prev_lot_end_value, id_field, prev_lot_end_id)
    delete_query = "{0}+OR+{1}".format(delete_prev, delete_last)

  delete_command = "{0}/{1}/update?commit=true&wt=json --data-binary <delete><query>{2}</query></delete>" \
    .format(solr_url, collection, delete_query)

  if mode == "save":
    return delete_command

  commands["delete"] = {
    "command": delete_command,
    "collection": collection,
    "filter_field": filter_field,
    "id_field": id_field,
    "prev_lot_end_value": prev_lot_end_value,
    "prev_lot_end_id": prev_lot_end_id
  }

  # Write to a temp file, then rename over the final path, so a crash mid-write can
  # never leave a truncated command.json behind.
  command_file_path = "{0}/command.json".format(working_dir)
  command_file_path_tmp = "{0}.tmp".format(command_file_path)
  # BUG FIX: the file handle must be closed (flushing its buffer) BEFORE the rename;
  # the original renamed the tmp file while it could still be empty/partial on disk,
  # and leaked the handle.
  with open(command_file_path_tmp, 'w') as cft:
    cft.write(json.dumps(commands, indent=4))
  os.rename(command_file_path_tmp, command_file_path)
  logger.debug("Command file %s was created", command_file_path)

  return upload_command if upload else delete_command