# lambda_handler
#
# From: src/timetriggerSNS/app.py

def lambda_handler(event, context):
    """Periodic reconciler for S3 restore jobs tracked in DynamoDB.

    For every job in the jobs table this handler:
      * fetches/updates the S3 Batch Operations status if not yet stored,
      * publishes SNS progress/completion events while files are restoring,
      * cleans up and publishes terminal events for blank, finished,
        timed-out (older than 5 days), failed, or cancelled jobs.

    Parameters
    ----------
    event, context : standard Lambda invocation arguments (unused here —
        this function is driven entirely by table contents).

    Returns
    -------
    dict with 'statusCode' 200 and a 'body' listing the JobIds that
    finished during this run.
    """
    # Get all jobs currently tracked in the jobs table
    jobs = getAllJobIds(jobs_table_name)

    finished_jobs = ""
    batch_job_status = ""
    failedFiles = 0

    # Iterate all jobs
    for job in jobs:
        # If there is no status from S3 Batch Ops yet, fetch it and update
        # the table; otherwise reuse the values already stored on the item.
        if 'JobStatus' not in job:
            # Only settled statuses are fetched here, bypassing all
            # intermediate ones (per the helper's contract).
            batch_job_status, failedFiles = getJobStatusAndUpdateTable(jobs_table_name, job['BatchJobId'], job['JobId'])
            job_status = batch_job_status
        else:
            job_status = job['JobStatus']
            failedFiles = job['BatchOpsFailedFiles']

        # While the batch job is restoring, count tasks and publish progress
        if job_status == "Restoring":

            # Get all tasks/files for this job, split by completion state
            filesInProgress = getFilesPerJob(file_table_name, job['JobId'], False)
            filesFinished = getFilesPerJob(file_table_name, job['JobId'], True)

            # The three cases below are mutually exclusive — use elif/else.
            if len(filesInProgress) == 0 and len(filesFinished) == 0:
                # Blank job without files: delete it from the jobs table
                # (file items have a 5-day TTL and clean up automatically)
                cleanDynamoDBtables(jobs_table_name, job['JobId'])

                # Publish event reporting the job as cancelled
                PublishEvent(job['JobId'], job['BatchJobId'], 'Cancelled', 0, failedFiles)

            elif len(filesInProgress) == 0:
                # All tasks finished: record, delete the job, publish completion
                finished_jobs = str(job['JobId']) + ", " + finished_jobs
                print("JobID that finished:" + job['JobId'])

                # Delete job from the jobs table; files expire via TTL
                cleanDynamoDBtables(jobs_table_name, job['JobId'])

                # Publish event signaling the process finished
                PublishEvent(job['JobId'], job['BatchJobId'], 'Complete', 100, failedFiles)

            else:
                # Files still in progress: compute percent complete.
                percentProgress = 0
                if job['TotalFiles'] != 0:
                    # FIX: the previous expression round(inProgress / total)
                    # could only ever be 0 or 1 (missing the *100) and was
                    # based on the *remaining* count. Progress percent is
                    # finished files over total, scaled to 0-100.
                    percentProgress = round(len(filesFinished) / int(job['TotalFiles']) * 100)

                # If the stored progress is unchanged, skip writes/events
                if 'RestoreProgressPercent' in job:
                    if percentProgress == job['RestoreProgressPercent']:
                        print('Status and Progress not changed for job:', job['JobId'])
                    else:
                        # Update table with new progress
                        updateJobProgress(jobs_table_name, job['JobId'], percentProgress)

                        # Publish event with progress
                        PublishEvent(job['JobId'], job['BatchJobId'], job_status, percentProgress, failedFiles)
                else:
                    # First progress observation: record it without publishing
                    updateJobProgress(jobs_table_name, job['JobId'], percentProgress)

        # If the job exceeded its 5-day deadline, delete it and send a timeout event
        deadlinetime = datetime.datetime.fromisoformat(job['Timestamp']) + datetime.timedelta(days=5)
        if datetime.datetime.now() > deadlinetime:
            # Delete job from the jobs table; files expire via TTL
            cleanDynamoDBtables(jobs_table_name, job['JobId'])

            # Publish timeout event
            PublishEvent(job['JobId'], job['BatchJobId'], "Timeout")

            # FIX: this job item is already deleted — skip to the next job so
            # the Failed/Cancelled branch below cannot delete/publish it again.
            continue

        # If the batch ops status is terminal-bad, publish and delete the job
        if job_status == "Failed" or job_status == "Cancelled":

            # Delete job from the jobs table; files expire via TTL
            cleanDynamoDBtables(jobs_table_name, job['JobId'])

            # Publish terminal-status event
            PublishEvent(job['JobId'], job['BatchJobId'], job_status)

    print('Executed and found these Jobs that are finished: ' + finished_jobs)

    return {
        'statusCode': 200,
        'body': json.dumps('Executed and found these Jobs that are finished: ' + finished_jobs)
    }