def genericTAParse()

in source/extract-ta-data-lambda.py


def genericTAParse(client,checkId,accountId,accountName,accountEmail,language,
        Date,dateTime,checkName,category):  
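    """Parse one Trusted Advisor check for a single account.

    Builds a summary CSV and a flagged-resources CSV, uploads both to S3
    under TA-Reports/<category>/..., and returns the HTTP status of the
    Trusted Advisor call, the check id and the generated file details.
    """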
    #Construct file names: CheckID_AccountID_Date_Time.csv and CheckID_AccountID_Summary_Date_Time.csv
    resourceFilename=(checkId+"_"+str(accountId)+"_"+str(Date)+"_"+
        str(datetime.utcnow().strftime("%H-%M-%S"))+'.csv')
    summaryFilename=(checkId+"_"+str(accountId)+"_Summary_"+str(Date)+
        "_"+str(datetime.utcnow().strftime("%H-%M-%S"))+'.csv')    
    fileDetails = [{"SummaryFileName":summaryFilename,
                    "SummaryFileSize": 0}, 
                    {"DetailsFileName":resourceFilename,
                    "DetailsFileSize": 0}]
    #Construct S3 Path
    resourceFilePath='TA-Reports/'+category+'/check_'+checkId+'/'+ \
        str(date.today().year)+'/'+str(date.today().month)+'/'+ \
        str(date.today().day)+'/'
    summaryFilePath='TA-Reports/'+category+'/Summary/'+str(date.today().year)+ \
        '/'+str(date.today().month)+'/'+str(date.today().day)+'/'
    #TA Check Module
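    # getTACheckResults is expected to return the Support API
    # describe_trusted_advisor_check_result response; result['result'] carries the
    # check status, resourcesSummary, categorySpecificSummary and the
    # flaggedResources list consumed below.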
    result=getTACheckResults(checkId,client,language)
    try:
        summaryFileHeader=os.environ["Header_Summary"].split(",")
        logger.info("Summary Header from environment variables: "+str(summaryFileHeader))
        resourceFileHeader=os.environ["Header_"+checkId].split(",")
        logger.info(checkId+" Check Header from environment variables: "+str(resourceFileHeader))
        resourceFileSchema=os.environ["Schema_"+checkId].split(",")
        logger.info(checkId+" Check Schema from environment variables: "+str(resourceFileSchema))
    except Exception as e:
        logger.error("Unable to find environment variable: %s" % e)
        raise Exception("Unable to find environment variable: %s" % e)
    logger.info("Trusted Advisor Summary Execution Block")
    summaryFileHeader.extend(["AccountId","AccountName","AccountEmail"])
    summaryFileHeader.insert(0,"CheckName") 
    summaryFileHeader.insert(0,"DateTime")
    summaryFileHeader.insert(0,"Date")
    summaryFileRows=[summaryFileHeader]
    summaryFileRow=[Date,dateTime,checkName,result['result']['checkId'],
        result['result']['status'],
        result['result']['resourcesSummary']['resourcesProcessed'],
        result['result']['resourcesSummary']['resourcesFlagged'],
        result['result']['resourcesSummary']['resourcesIgnored'],
        result['result']['resourcesSummary']['resourcesSuppressed']]
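    # Only cost-optimization checks report estimated savings; other categories
    # get zero placeholders so the summary column count stays constant.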
    if "costOptimizing" in result['result']['categorySpecificSummary'].keys():
        summaryFileRow.extend(
            [result['result']['categorySpecificSummary']['costOptimizing']\
            ['estimatedMonthlySavings'],result['result']\
            ['categorySpecificSummary']['costOptimizing']\
            ['estimatedPercentMonthlySavings'],
            str(accountId),accountName,accountEmail])
    else:
        summaryFileRow.extend([0,0,str(accountId),accountName,accountEmail])
    logger.info(sanitize_list(summaryFileRow))
    summaryFileRows.append(summaryFileRow)

    
    #Write the Summary Values into a csv file & Copy file to S3
    if len(summaryFileRows) > 1:
        fileDetails[0]['SummaryFileSize'] = write2csv(summaryFileRows,summaryFilename)
        writeToS3(summaryFilename,summaryFilePath)
    
    logger.info("Trusted Advisor Results Execution Block")
    #TA Flagged Resources Execution
    resourceFileHeader.extend(["AccountId","AccountName","AccountEmail"])
    resourceFileHeader.insert(0,"CheckName")
    resourceFileHeader.insert(0,"DateTime")
    resourceFileHeader.insert(0,"Date")
    resourceFileRows=[resourceFileHeader]
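    # Emit one row per flagged resource, keeping only "warning" and "error" statuses.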
    for store in result['result']['flaggedResources']:
        if store['status'] in ("warning", "error"):
            resourceFileRow=[]
            for key in resourceFileSchema:
                # Numeric schema entries index into the flagged resource's metadata list;
                # commas are stripped so the values do not break the CSV column layout.
                if key.isdigit():
                    value = store['metadata'][int(key)]
                    resourceFileRow.append(value if value is None else value.replace(",",""))
                else:
                    # Non-numeric entries are top-level keys of the flagged resource (e.g. status, region).
                    resourceFileRow.append(store[key])
            resourceFileRow.extend([str(accountId),accountName,accountEmail])
            resourceFileRow.insert(0,checkName)
            resourceFileRow.insert(0,dateTime)
            resourceFileRow.insert(0,Date)
            logger.info(sanitize_list(resourceFileRow))
            resourceFileRows.append(resourceFileRow)
    

    #Write the Resource Values into a csv file & Copy file to S3
    if len(resourceFileRows) > 1:
        fileDetails[1]['DetailsFileSize'] = write2csv(resourceFileRows,resourceFilename)
        writeToS3(resourceFilename,resourceFilePath)
    
    logger.info("Clean /tmp/")
    call('rm -rf /tmp/*', shell=True)
     
    return {"status": result['ResponseMetadata']['HTTPStatusCode'],
            "checkId": checkId, "fileDetails": fileDetails}