in container_images/batch_image/inference.py [0:0]
def get_inference(inputBucket, fileName, region):
    """Download an input file from S3, run the ResNet inference pipeline on
    it, and upload the resulting CSV back to the same bucket under 'output/'.

    Parameters
    ----------
    inputBucket : str
        Name of the S3 bucket holding the input object; also the destination
        bucket for the inference output.
    fileName : str
        Basename of the input object (looked up under ``INPUT_PREFIX``); its
        stem is reused for the local and uploaded output filenames.
    region : str
        AWS region; currently unused by this function but kept for
        interface compatibility with existing callers.

    Returns
    -------
    None
        Errors in any phase are logged via ``logMessage`` and abort the
        remaining phases; nothing is raised to the caller.
    """
    s3 = boto3.client('s3')
    inputFilePath = '/tmp/' + fileName
    inferenceFilePath = '/tmp/' + fileName.split('.')[0] + '_out.csv'

    # Download file from S3. Abort early on failure: the original code fell
    # through and ran the pipeline (and upload) against a file that was never
    # written, producing two additional misleading error logs.
    try:
        s3.download_file(inputBucket, INPUT_PREFIX + fileName, inputFilePath)
    except ClientError as e:
        # Message corrected: this call uses `download_file`, not `download_fileobj`.
        logMessage(fileName, "Error retrieving file from S3 using `download_file`" + str(e), LOGTYPE_DEBUG)
        return

    # Run the inference pipeline and persist its output locally as CSV text.
    try:
        inference = resnet_pipeline(inputFilePath, fileName)
        print("inference from resnet_pipeline as below:")
        print(inference)
        with open(inferenceFilePath, "w") as ff:
            ff.write(inference)
    except Exception as e:
        logMessage(fileName, "Error processing file " + str(e), LOGTYPE_DEBUG)
        return  # nothing to upload if inference failed

    # Upload the result. The key embeds a millisecond-precision timestamp so
    # repeated runs for the same input do not overwrite each other.
    # NOTE(review): ':' is legal in S3 keys but awkward for some tools/URLs —
    # consider a colon-free timestamp format.
    try:
        endTime = datetime.now()
        outKey = ('output/'
                  + endTime.strftime("%m-%d-%Y-%H:%M:%S.%f")[:-3]
                  + '-' + fileName.split('.')[0] + '.csv')
        with open(inferenceFilePath, 'rb') as file:
            s3.upload_fileobj(file, inputBucket, outKey)
    except ClientError as e:
        logMessage(fileName, "Can't upload to S3 using `upload_fileobj`" + str(e), LOGTYPE_DEBUG)