in forecastMetrics/forecastMetrics.py [0:0]
def onEventHandler(event, context):
    existingDailyDataKeyList = getExistingHistoricalDataKeyList()
    exportResponse = s3_client.list_objects_v2(
        Bucket=S3BucketName,
        Prefix='ForecastExports'
    )
    ## Walk all available exports: archive completed ones, then generate metrics
    ## for successful ones and publish them to CloudWatch. "Contents" is absent
    ## when no keys match the prefix, so default to an empty list.
    for content in exportResponse.get("Contents", []):
        key = content["Key"]
        try:
            # Archive exports that already had their metrics published
            if "_ARCHIVED" in key:
                forecastDatasetGroupName = key.split("/")[1].replace("_Forecast", "")
                config = loadconfig(forecastDatasetGroupName)
                exportFolderKey = key.replace("/_ARCHIVED", "")
                newExportFolderKey = "Archived/" + exportFolderKey
                move_then_delete_path_v2(s3_client, S3BucketName, exportFolderKey, newExportFolderKey)
        except Exception as e:
            # Log the object key rather than exportFolderKey, which may be
            # unbound if the failure happened before it was assigned
            logger.error("Failed to archive forecast export, key=" + key + ", will skip and continue with the next export")
            logger.error(e)
            continue
        try:
            if "_SUCCESS" in key:
                forecastDatasetGroupName = key.split("/")[1].replace("_Forecast", "")
                config = loadconfig(forecastDatasetGroupName)
                forecast_starttime = config["forecast_starttime"]
                forecast_endtime = config["forecast_endtime"]
                # Verify that historical data exists for every day in the
                # forecast window before the export can be archived
                dataAvailable = checkHistoricalDataAvailable(
                    existingDailyDataKeyList,
                    getDateFromString(forecast_starttime),
                    getDateFromString(forecast_endtime)
                )
                exportFolderKey = key.replace("/_SUCCESS", "")
                calculatePublishMetrics(forecastDatasetGroupName, config, exportFolderKey)
                if dataAvailable:
                    # Swap the _SUCCESS marker for an _ARCHIVED marker so the
                    # next invocation moves this export under the Archived/ prefix
                    newKey = key.replace("_SUCCESS", "_ARCHIVED")
                    s3_client.put_object(Bucket=S3BucketName, Key=newKey)
                    s3_client.delete_object(Bucket=S3BucketName, Key=key)
                else:
                    logger.info(forecastDatasetGroupName + " is not ready to be archived yet")
        except Exception as e:
            logger.error("Failed to evaluate forecast performance, export key=" + key + ", will skip and continue with the next export")
            logger.error(e)
            continue
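
The handler relies on module-level state (`s3_client`, `logger`, `S3BucketName`) and helpers defined elsewhere in this file, and a single `list_objects_v2` call returns at most 1,000 keys per response. Below is a minimal sketch of the assumed setup, along with a paginated listing helper that could stand in for the one-shot call; the `S3_BUCKET_NAME` environment variable and the `list_all_export_keys` name are illustrative assumptions, not taken from the source.

# Sketch of the module-level setup this handler appears to assume;
# the environment-variable and helper names here are hypothetical.
import os
import logging

import boto3

logger = logging.getLogger()
logger.setLevel(logging.INFO)

s3_client = boto3.client("s3")
S3BucketName = os.environ["S3_BUCKET_NAME"]  # assumed env var name

def list_all_export_keys(prefix="ForecastExports"):
    """Yield every key under the prefix. list_objects_v2 caps a single
    response at 1,000 keys, so the paginator covers larger listings."""
    paginator = s3_client.get_paginator("list_objects_v2")
    for page in paginator.paginate(Bucket=S3BucketName, Prefix=prefix):
        for content in page.get("Contents", []):
            yield content["Key"]

One design note that falls out of the code above: the handler creates the `_ARCHIVED` marker with `put_object` before deleting the `_SUCCESS` marker, so a crash between the two calls leaves the export discoverable on the next invocation rather than orphaned.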