# s3-logs-extension-demo-zip-archive/extensionssrc/extensions/logs_api_http_extension.py
def run_forever(self):
    """Poll the Lambda Extensions API forever, shipping queued log batches to S3.

    Each `next()` call blocks until Lambda delivers the next lifecycle event
    (invoke/shutdown); after it returns, every log batch the HTTP listener has
    queued so far is uploaded to the bucket named by the S3_BUCKET_NAME
    environment variable, one object per batch.

    Raises:
        KeyError: if S3_BUCKET_NAME or AWS_LAMBDA_FUNCTION_NAME is not set.
        Exception: wrapping any S3 upload failure (this terminates the loop).
    """
    # Configure the S3 connection once, before entering the event loop.
    s3_bucket = os.environ['S3_BUCKET_NAME']
    s3 = boto3.resource('s3')
    # Hoist loop-invariant values: the bucket handle and the object-key prefix
    # were previously rebuilt on every batch.
    bucket = s3.Bucket(s3_bucket)
    key_prefix = os.environ['AWS_LAMBDA_FUNCTION_NAME']
    print(f"extension.logs_api_http_extension: Receiving Logs {self.agent_name}")
    while True:
        # Long-poll: returns when Lambda has an event for this extension.
        # The response payload is not needed here, only the wake-up.
        self.extensions_api_client.next(self.agent_id)
        # Drain every batch the log listener queued since the last event.
        while not self.queue.empty():
            batch = self.queue.get_nowait()
            # This following line logs the events received to CloudWatch.
            # Replace it to send logs to elsewhere.
            # If you've subscribed to extension logs, e.g. "types": ["platform", "function", "extension"],
            # you'll receive the logs of this extension back from Logs API.
            # And if you log it again with the line below, it will create a cycle since you receive it back again.
            # Use `extension` log type if you'll egress it to another endpoint,
            # or make sure you've implemented a protocol to handle this case.
            # print(f"Log Batch Received from Lambda: {batch}", flush=True)
            # There are two options illustrated:
            # 1. Sending the entire log batch to S3
            # 2. Parsing the batch and sending individual log lines.
            # This could be used to parse the log lines and only selectively send logs to S3, or amend for any other destination.
            # 1. The following line writes the entire batch to S3
            s3_filename = f"{key_prefix}-{datetime.now().strftime('%Y-%m-%d-%H:%M:%S.%f')}.log"
            # NOTE(review): str(batch) stores the Python repr of the batch,
            # not JSON — use json.dumps(batch) if consumers expect JSON.
            try:
                bucket.put_object(Key=s3_filename, Body=str(batch))
            except Exception as e:
                raise Exception(f"Error sending log to S3 {e}") from e