in Solutions/GoogleWorkspaceReports/Data Connectors/GWorkspaceReportsAPISentinelConn/GWorkspaceReports-TimeTrigger/__init__.py [0:0]
def main(mytimer: func.TimerRequest):
    """Azure Functions timer entry point for the Google Workspace Reports connector.

    For each configured activity, reads the last-processed checkpoint, splits the
    time range from the checkpoint up to "now" into windows of at most
    TIME_WINDOW_To_POLL_API seconds, and enqueues one work item per window onto
    the "gworkspace-queue-items" storage queue.  The checkpoint (state file) is
    updated after every enqueued window so that a run cut short by the execution
    time limit resumes where it stopped.

    Args:
        mytimer: Timer binding supplied by the Azure Functions runtime.

    Raises:
        Exception: any error hit while processing an activity is logged and
            re-raised so the Functions runtime records the invocation as failed.
    """
    logging.getLogger().setLevel(logging.INFO)
    if mytimer.past_due:
        logging.info('The timer is past due!')
    script_start_time = int(time.time())
    logging.info('Starting GWorkspaceReport-TimeTrigger program at {}'.format(time.ctime(int(time.time()))))
    mainQueueHelper = AzureStorageQueueHelper(connectionString=connection_string, queueName="gworkspace-queue-items")
    logging.info("Check if we already have enough backlog to process in main queue. Maximum set is MAX_QUEUE_MESSAGES_MAIN_QUEUE: {} ".format(MAX_QUEUE_MESSAGES_MAIN_QUEUE))
    mainQueueCount = mainQueueHelper.get_queue_current_count()
    logging.info("Main queue size is {}".format(mainQueueCount))
    # Back off while the queue already holds the maximum allowed backlog; give up
    # for this invocation if waiting pushes us past the allowed execution time.
    while mainQueueCount >= MAX_QUEUE_MESSAGES_MAIN_QUEUE:
        time.sleep(15)
        if check_if_script_runs_too_long(script_start_time):
            logging.info("We already have enough messages to process. Not clearing any backlog or reading a new SQS message in this iteration.")
            return
        mainQueueCount = mainQueueHelper.get_queue_current_count()
    latest_timestamp = ""
    # GetDates("") returns the full checkpoint mapping (activity -> last timestamp).
    postactivity_list = GetDates("")
    for line in activities:
        if check_if_script_runs_too_long(script_start_time):
            logging.info("Some more backlog to process, but ending processing for new data for this iteration, remaining will be processed in next iteration")
            return
        try:
            start_time, end_time = GetDates(line)
            if start_time is None:
                # No checkpoint for this activity yet: default to the last 24 hours.
                logging.info("There is no last time point, trying to get events for last one day.")
                end_time = datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ")
                start_time = (end_time - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
                end_time = end_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
            logging.info("Start time: {} and End time: {} for activity {}".format(start_time, end_time, line))
            # Google Workspace Reports API only retains 180 days of data; if the
            # checkpoint is older than that, pull start_time forward to now - 179 days.
            if (datetime.utcnow() - timedelta(days=180)) > datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ"):
                logging.info("Start time older than 180 days. Setting start time to current UTC time minus 179 days as Google Workspace Reports API only supports 180 days of data.")
                start_time = (datetime.utcnow() - timedelta(days=179)).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
                # Truncate microseconds to milliseconds ("...123456Z" -> "...123Z").
                start_time = start_time[:-4] + 'Z'
            state = StateManager(connection_string)
            # If the span is larger than TIME_WINDOW_To_POLL_API, split it into
            # fixed-size chunks and enqueue/checkpoint each chunk separately.
            while (convertToDatetime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") - convertToDatetime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ")).total_seconds() > TIME_WINDOW_To_POLL_API:
                if check_if_script_runs_too_long(script_start_time):
                    logging.info("Some more backlog to process, but ending processing for new data for this iteration, remaining will be processed in next iteration")
                    return
                loop_end_time = end_time
                # Only enqueue when the chunk is non-empty (start strictly before end).
                if not (convertToDatetime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") >= convertToDatetime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ")):
                    end_time = (convertToDatetime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") + timedelta(seconds=TIME_WINDOW_To_POLL_API)).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
                    end_time = end_time[:-4] + 'Z'
                    format_message_for_queue_and_add(start_time, end_time, line, mainQueueHelper)
                    # Persist progress after every chunk so a timeout resumes here.
                    latest_timestamp = end_time
                    postactivity_list[line] = latest_timestamp
                    logging.info("Updating state file with latest timestamp : {} for activity {}".format(latest_timestamp, line))
                    state.post(str(json.dumps(postactivity_list)))
                    # Advance to the next chunk and restore the overall end bound.
                    start_time = end_time
                    end_time = loop_end_time
            # Remaining (or only) window fits within one poll interval: enqueue it
            # and checkpoint, provided it is non-empty.
            if (convertToDatetime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") - convertToDatetime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ")).total_seconds() <= TIME_WINDOW_To_POLL_API and not (convertToDatetime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") >= convertToDatetime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ")):
                format_message_for_queue_and_add(start_time, end_time, line, mainQueueHelper)
                latest_timestamp = end_time
                postactivity_list[line] = latest_timestamp
                logging.info("Updating state file with latest timestamp : {} for activity {}".format(latest_timestamp, line))
                state.post(str(json.dumps(postactivity_list)))
        except Exception as err:
            logging.error("Something wrong. Exception error text: {}".format(err))
            logging.error("Error: Google Workspace Reports data connector execution failed with an internal server error.")
            raise
    logging.info('Ending GWorkspaceReport-TimeTrigger program at {}'.format(time.ctime(int(time.time()))))