in ambari-infra-solr-client/src/main/python/solrDataManager.py
# Relies on module-level imports (os, sys, subprocess.call) and on the
# module's logger and run_kinit() helper defined elsewhere in this file.
def upload_file_hdfs(hdfs_kinit_command, upload_command, upload_file_path, hdfs_path, hdfs_user):
  # Obtain a Kerberos ticket first if the cluster is kerberized.
  if hdfs_kinit_command:
    run_kinit(hdfs_kinit_command, "HDFS")

  # Check whether a file with the same name is already on HDFS, so the
  # upload can be skipped (e.g. after a previously interrupted run).
  try:
    hdfs_file_exists_command = "sudo -u {0} hadoop fs -test -e {1}".format(hdfs_user, hdfs_path + os.path.basename(upload_file_path))
    logger.debug("Checking if file already exists on hdfs:\n%s", hdfs_file_exists_command)
    hdfs_file_exists = (0 == call(hdfs_file_exists_command.split()))
  except Exception as e:
    print()
    logger.warn("Could not execute command to check if file already exists on HDFS:\n%s", hdfs_file_exists_command)
    logger.warn(str(e))
    sys.exit()

  # Upload only if the local file exists and is not yet on HDFS.
  if os.path.isfile(upload_file_path) and not hdfs_file_exists:
    try:
      logger.debug("Uploading file to hdfs:\n%s", upload_command)
      result = call(upload_command.split())
    except Exception as e:
      print()
      logger.warn("Could not execute command to upload file to HDFS:\n%s", upload_command)
      logger.warn(str(e))
      sys.exit()

    if result != 0:
      logger.warn("Could not upload file to HDFS with command:\n%s", upload_command)
      sys.exit()

  # At this point the file is on HDFS (either from this run or an earlier
  # one), so the local copy is no longer needed.
  logger.info("File %s was uploaded to hdfs %s", os.path.basename(upload_file_path), hdfs_path)
  os.remove(upload_file_path)
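
A minimal sketch of how a caller might invoke this helper, assuming the upload command is a plain "hadoop fs -put" run as the HDFS user. The paths, user name, and file name below are illustrative placeholders, not values taken from the script itself.

  # Hypothetical values for illustration only. hdfs_path should end with "/"
  # because the function appends the file's basename to it for the existence check.
  hdfs_user = "hdfs"
  hdfs_path = "/user/infra-solr/backup/"
  local_file = "/tmp/solr_export_2017-10-27.json.tar.gz"
  kinit_cmd = None  # e.g. "kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs" on kerberized clusters
  put_cmd = "sudo -u {0} hadoop fs -put {1} {2}".format(hdfs_user, local_file, hdfs_path)

  upload_file_hdfs(kinit_cmd, put_cmd, local_file, hdfs_path, hdfs_user)

Note that the command strings are split on whitespace before being passed to subprocess.call, so this sketch assumes none of the paths contain spaces.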