# distributed_nhc/export_nhc_result_to_kusto.py
def ingest_results(results_file, creds, ingest_url, database, results_table_name, nhc_run_uuid="None"):
    """Ingest a distributed-NHC results file into a Kusto results table.

    Collects VM metadata (size, id, hostname, physical host) via the Azure
    IMDS endpoint and local shell commands, builds a single-row record from
    the results file, and queues it for ingestion into Kusto.

    Args:
        results_file: Path to the NHC results file to ingest.
        creds: Azure token credential used to authenticate the ingest client.
        ingest_url: Kusto ingestion endpoint URL.
        database: Target Kusto database name.
        results_table_name: Target Kusto table name.
        nhc_run_uuid: Optional run identifier; the literal string "None" is
            the caller-side sentinel meaning no run uuid was supplied.
    """
    ts = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    # Derive the job name from the file name alone. os.path.splitext strips
    # only the final extension, so dots elsewhere in the path (e.g. "./x.log")
    # no longer truncate the name, as the old split(".")[0] did. The
    # backslash replacement accounts for \ or / separators in the path.
    job_name = os.path.splitext(os.path.basename(results_file.replace("\\", "/")))[0]
    # Renamed from 'uuid' to avoid shadowing the stdlib uuid module.
    uuid_suffix = job_name if nhc_run_uuid == "None" else f"{nhc_run_uuid}-{job_name}"
    if uuid_suffix == "health":
        uuid_suffix = ""
    else:
        # Add the dash here instead of in the f-string below; this way if
        # the suffix is empty we don't end up with a trailing dash.
        uuid_suffix = "-" + uuid_suffix
    full_uuid = f"nhc-{ts}{uuid_suffix}"

    # Query the Azure IMDS metadata endpoint for VM size and id; vmSize is
    # lowercased for consistency across records.
    vmSize_bash_cmd = "echo $( curl -H Metadata:true --max-time 10 -s \"http://169.254.169.254/metadata/instance/compute/vmSize?api-version=2021-01-01&format=text\") | tr '[:upper:]' '[:lower:]' "
    vmSize = run_command(vmSize_bash_cmd)
    vmId_bash_cmd = "curl -H Metadata:true --max-time 10 -s \"http://169.254.169.254/metadata/instance/compute/vmId?api-version=2021-02-01&format=text\""
    vmId = run_command(vmId_bash_cmd)
    vmName = run_command("hostname")
    # Map to the physical host name via the Hyper-V KVP daemon output, if
    # available on this node; fall back to a sentinel when it isn't.
    physhost = run_command("echo $(hostname) \"$(/opt/azurehpc/tools/kvp_client | grep Fully)\" | cut -d ':' -f 3 | cut -d ' ' -f 2 | sed 's/\"//g'")
    if not physhost:
        physhost = "not Mapped"

    with open(results_file, 'r') as f:
        full_results = f.read()

    jsonResultDict = get_nhc_json_formatted_result(results_file)
    jsonResult = json.dumps(jsonResultDict)

    record = {
        'vmSize': vmSize,
        'vmId': vmId,
        'vmHostname': vmName,
        'physHostname': physhost,
        'workflowType': "main",
        'time': ts,
        'pass': False,  # default false; flipped below only on explicit success
        'errors': '',
        'logOutput': full_results,  # the entire file
        'jsonResult': jsonResult,
        'uuid': full_uuid
    }

    # Classify the run: any ERROR marks a failure; otherwise require the
    # explicit success banner, else record an inconclusive-result error.
    if "ERROR" in full_results:
        record['errors'] = full_results
    elif "Node Health Check completed successfully" in full_results:
        record['pass'] = True
    else:
        record['errors'] = "No Node Health Check completed successfully or ERROR"

    df = pd.DataFrame(record, index=[0])
    ingest_client = QueuedIngestClient(KustoConnectionStringBuilder.with_azure_token_credential(ingest_url, creds))
    print(f"Ingesting results from {os.path.basename(results_file)} into {ingest_url} at {database}/{results_table_name}")
    ingest_client.ingest_from_dataframe(df, IngestionProperties(database, results_table_name))