in translate_json/s3_event_handler.py [0:0]
def processRequest(request):
    """Convert input JSON documents in an S3 bucket to XML and start a batch translation job.

    Expects ``request`` (a dict) to carry the keys: ``bucketName``,
    ``sourceLanguage``, ``targetLanguage``, ``access_role`` and
    ``trigger_file``. Each ``input/*.json`` object is parsed, converted to XML
    and written under ``xmlin/`` for translation; the source JSON is then
    renamed to ``<key>.processed`` so it is not picked up again. Finally the
    translation batch job is started and the trigger file is deleted.

    Per-object JSON/S3 failures are logged and skipped; a top-level S3 failure
    aborts the run with an error log. Returns ``None``.
    """
    output = ""
    logger.info("request: {}".format(request))
    bucketName = request["bucketName"]
    sourceLanguageCode = request["sourceLanguage"]
    targetLanguageCode = request["targetLanguage"]
    access_role = request["access_role"]
    triggerFile = request["trigger_file"]
    # Reuse a single helper instance; the original constructed a fresh
    # S3Helper() for every individual S3 call, including inside the loop.
    s3 = S3Helper()
    try:
        # Filter only the JSON files for processing
        objs = s3.getFilteredFileNames(bucketName, "input/", "json")
        for obj in objs:
            try:
                content = s3.readFromS3(bucketName, obj)
                logger.debug(content)
                jsonDocument = json.loads(content)
                # Convert the JSON document into XML
                outputXML = json2xml.Json2xml(jsonDocument, attr_type=False).to_xml()
                logger.debug(outputXML)
                newObjectKey = "xmlin/{}.xml".format(FileHelper.getFileName(obj))
                # Store the XML in the S3 location for Translation
                s3.writeToS3(str(outputXML), bucketName, newObjectKey)
                output = "Output Object: {}/{}".format(bucketName, newObjectKey)
                logger.debug(output)
                # Rename the JSON files to prevent reprocessing
                s3.renameObject(bucketName, obj, "{}.processed".format(obj))
            except ValueError:
                # Malformed JSON: log and continue with the next object.
                logger.error("Error occured loading the json file:{}".format(obj))
            except ClientError as e:
                # Per-object S3 failure: log and continue with the next object.
                logger.error("An error occured with S3 Bucket Operation: %s" % e)
        # Start the translation batch job using Amazon Translate
        startTranslationJob(bucketName, sourceLanguageCode, targetLanguageCode, access_role)
        # Remove the trigger file so this event is not reprocessed.
        s3.deleteObject(bucketName, "input/{}".format(triggerFile))
    except ClientError as e:
        logger.error("An error occured with S3 Bucket Operation: %s" % e)