in djl-tensorflow-lite-inference-docker-lambda/src/main/java/com/example/App.java [37:69]
/**
 * Lambda entry point: reads a JSON request containing an image URL, runs TFLite
 * image classification on it, and writes the best classification as JSON to the
 * output stream. On failure, writes a JSON error payload instead of propagating.
 *
 * @param is the raw request body (expected: JSON deserializable into {@code Request})
 * @param os the response stream; receives UTF-8 encoded JSON
 * @param context the Lambda execution context, used only for logging
 * @throws IOException if reading the input or writing the output fails
 */
public void handleRequest(InputStream is, OutputStream os, Context context) throws IOException {
    LambdaLogger logger = context.getLogger();
    String input = Utils.toString(is);
    Request request = GSON.fromJson(input, Request.class);
    String inputImageUrl = request.getInputImageUrl();
    logger.log("inputImageUrl: " + inputImageUrl);
    try {
        Criteria<Image, Classifications> criteria = Criteria.builder()
                .setTypes(Image.class, Classifications.class)
                .optEngine("TFLite")
                .optFilter("dataset", "aiyDish")
                .build();
        // ZooModel and Predictor are AutoCloseable; close them via try-with-resources
        // so native TFLite resources are released instead of leaking across
        // warm Lambda invocations (the original never closed either).
        try (ZooModel<Image, Classifications> model = ModelZoo.loadModel(criteria);
                Predictor<Image, Classifications> predictor = model.newPredictor()) {
            Image image = ImageFactory.getInstance().fromUrl(inputImageUrl);
            Classifications classifications = predictor.predict(image);
            logger.log("Classifications: " + classifications);
            // best() picks the top-probability class; serialize just that entry.
            os.write(GSON.toJson(classifications.best()).getBytes(StandardCharsets.UTF_8));
        }
    } catch (RuntimeException | ModelException | TranslateException e) {
        logger.log("Failed handle input: " + input);
        logger.log(e.toString());
        // NOTE(review): e.toString() is embedded unescaped — quotes or backslashes
        // in the exception message would make this payload invalid JSON. Consider
        // serializing a status object with GSON instead of string concatenation.
        String msg = "{\"status\": \"invoke failed: " + e.toString() + "\"}";
        os.write(msg.getBytes(StandardCharsets.UTF_8));
    }
}