in go-example-adaptive-batching-extension/agent/logger.go [44:89]
func NewS3Logger() (*S3Logger, error) {
	// Find the destination bucket name from the environment
	bucket, present := os.LookupEnv("ADAPTIVE_BATCHING_EXTENSION_S3_BUCKET")
	if !present {
		return nil, errors.New("environment variable ADAPTIVE_BATCHING_EXTENSION_S3_BUCKET is not set")
	}
	fmt.Println("Sending logs to:", bucket)
	// Set up the in-memory log buffer, pre-grown to hold two upload parts
	buffer := bytes.NewBuffer([]byte(""))
	buffer.Grow(2 * MAX_PART_SIZE)
	// Create the S3 bucket that will receive the logs
	if err := createBucket(bucket); err != nil {
		logger.Error("Error creating S3 Bucket")
		return nil, err
	}
	// Create the prefix for the S3 objects, unique to the sandbox environment this extension runs in.
	// Format: {year}-{month}-{day}-{uuid}
	environmentId := uuid.New().String()
	t := time.Now().Format("2006-01-02")
	prefix := t + "-" + environmentId + "/"
	logger.Info("Environment ID: " + environmentId)
	// Create filename
	fileName := generateFileName()
	// Create the AWS session used by the uploader
	svc := session.Must(session.NewSession())
	// Initialize the uploader with the configured part size
	uploader := s3manager.NewUploader(svc, func(u *s3manager.Uploader) {
		u.PartSize = MAX_PART_SIZE
	})
	return &S3Logger{
		svc:       svc,
		bucket:    bucket,
		logBuffer: buffer,
		fileName:  fileName,
		prefix:    prefix,
		uploader:  uploader,
	}, nil
}
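
A minimal usage sketch (an assumption for illustration, not code taken from this repository): the extension's startup path would construct the logger once and abort if the bucket variable is missing or the bucket cannot be created.

	// Hypothetical caller; only NewS3Logger above comes from logger.go.
	s3Logger, err := NewS3Logger()
	if err != nil {
		// Fail fast: without a bucket there is nowhere to ship the logs.
		log.Fatalln("failed to initialize the S3 logger:", err)
	}
	// s3Logger now buffers log lines in logBuffer; the upload code elsewhere in logger.go
	// presumably writes them to s3://{bucket}/{prefix}{fileName} as batches fill up.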