// Excerpt: bool GenerateWrapperAPI()
//
// From tensorflow_lite_support/codegen/android_java_generator.cc [608:766]

// Emits the Java "wrapper API" section of the generated model class:
// getMetadata(), the newInstance(...) factory overloads, per-tensor
// pre/post-processor resetters, process(), close(), the private constructor,
// the default pre/post-processor builder methods, and preprocessInputs().
//
// Every R"(...)" literal below is a Java source template written verbatim to
// `code_writer`; {{TOKEN}} placeholders (e.g. {{MODEL_CLASS_NAME}}, {{NAME}},
// {{NAME_U}}, {{PROCESSOR_TYPE}}) are substituted by the CodeWriter.
// SetCodeWriterWithTensorInfo binds the per-tensor tokens before each
// tensor-specific template is appended.
//
// Always returns true; `err` is only used to construct the helper CodeWriter
// that builds the preprocessInputs(...) buffer list.
bool GenerateWrapperAPI(CodeWriter* code_writer, const ModelInfo& model,
                        ErrorReporter* err) {
  // Accessor for the parsed model metadata held by the wrapper instance.
  code_writer->Append(R"(public Metadata getMetadata() {
  return metadata;
}
)");
  // Four newInstance(...) factory overloads: (context), (context, modelPath),
  // (context, options), and the full (context, modelPath, options) form that
  // the first three delegate to.
  code_writer->Append(R"(/**
 * Creates interpreter and loads associated files if needed.
 *
 * @throws IOException if an I/O error occurs when loading the tflite model.
 */
public static {{MODEL_CLASS_NAME}} newInstance(Context context) throws IOException {
  return newInstance(context, MODEL_NAME, new Model.Options.Builder().build());
}

/**
 * Creates interpreter and loads associated files if needed, but loading another model in the same
 * input / output structure with the original one.
 *
 * @throws IOException if an I/O error occurs when loading the tflite model.
 */
public static {{MODEL_CLASS_NAME}} newInstance(Context context, String modelPath) throws IOException {
  return newInstance(context, modelPath, new Model.Options.Builder().build());
}

/**
 * Creates interpreter and loads associated files if needed, with running options configured.
 *
 * @throws IOException if an I/O error occurs when loading the tflite model.
 */
public static {{MODEL_CLASS_NAME}} newInstance(Context context, Model.Options runningOptions) throws IOException {
  return newInstance(context, MODEL_NAME, runningOptions);
}

/**
 * Creates interpreter for a user-specified model.
 *
 * @throws IOException if an I/O error occurs when loading the tflite model.
 */
public static {{MODEL_CLASS_NAME}} newInstance(Context context, String modelPath, Model.Options runningOptions) throws IOException {
  Model model = Model.createModel(context, modelPath, runningOptions);
  Metadata metadata = new Metadata(model.getData(), model);
  {{MODEL_CLASS_NAME}} instance = new {{MODEL_CLASS_NAME}}(model, metadata);)");
  // Inside the full newInstance overload, install the default preprocessor
  // for every input tensor...
  for (const auto& tensor : model.inputs) {
    SetCodeWriterWithTensorInfo(code_writer, tensor);
    code_writer->Append(
        R"(  instance.reset{{NAME_U}}Preprocessor(
      instance.buildDefault{{NAME_U}}Preprocessor());)");
  }
  // ...and the default postprocessor for every output tensor.
  for (const auto& tensor : model.outputs) {
    SetCodeWriterWithTensorInfo(code_writer, tensor);
    code_writer->Append(
        R"(  instance.reset{{NAME_U}}Postprocessor(
      instance.buildDefault{{NAME_U}}Postprocessor());)");
  }
  // Close the newInstance(...) body.
  code_writer->Append(R"(  return instance;
}
)");

  // Pre, post processor setters: one reset{{NAME_U}}Pre/Postprocessor method
  // per tensor so callers can swap in custom processors.
  for (const auto& tensor : model.inputs) {
    SetCodeWriterWithTensorInfo(code_writer, tensor);
    code_writer->Append(R"(
public void reset{{NAME_U}}Preprocessor({{PROCESSOR_TYPE}} processor) {
  {{NAME}}Preprocessor = processor;
})");
  }
  for (const auto& tensor : model.outputs) {
    SetCodeWriterWithTensorInfo(code_writer, tensor);
    code_writer->Append(R"(
public void reset{{NAME_U}}Postprocessor({{PROCESSOR_TYPE}} processor) {
  {{NAME}}Postprocessor = processor;
})");
  }
  // Process method: preprocess inputs, run inference, return wrapped outputs.
  // Also emits close() to release the interpreter.
  code_writer->Append(R"(
/** Triggers the model. */
public Outputs process({{INPUT_TYPE_PARAM_LIST}}) {
  Outputs outputs = new Outputs(metadata, {{POSTPROCESSORS_LIST}});
  Object[] inputBuffers = preprocessInputs({{INPUTS_LIST}});
  model.run(inputBuffers, outputs.getBuffer());
  return outputs;
}

/** Closes the model. */
public void close() {
  model.close();
}
)");
  {
    // Private constructor: stores the Model and Metadata handles. AsBlock
    // scopes the braces; the block closes when `block` goes out of scope.
    auto block =
        AsBlock(code_writer,
                "private {{MODEL_CLASS_NAME}}(Model model, Metadata metadata)");
    code_writer->Append(R"(this.model = model;
this.metadata = metadata;)");
  }
  // buildDefault{{NAME_U}}Preprocessor(): pipeline is
  // resize (image inputs only) -> normalize (if declared) -> quantize -> cast.
  for (const auto& tensor : model.inputs) {
    code_writer->NewLine();
    SetCodeWriterWithTensorInfo(code_writer, tensor);
    auto block = AsBlock(
        code_writer,
        "private {{PROCESSOR_TYPE}} buildDefault{{NAME_U}}Preprocessor()");
    code_writer->Append(
        "{{PROCESSOR_TYPE}}.Builder builder = new "
        "{{PROCESSOR_TYPE}}.Builder()");
    if (tensor.content_type == "image") {
      // Resize images to the tensor's height/width (shape[1], shape[2]).
      code_writer->Append(R"(    .add(new ResizeOp(
        metadata.get{{NAME_U}}Shape()[1],
        metadata.get{{NAME_U}}Shape()[2],
        ResizeMethod.NEAREST_NEIGHBOR)))");
    }
    if (tensor.normalization_unit >= 0) {
      // Only emit NormalizeOp when the metadata carries normalization params.
      code_writer->Append(
          R"(    .add(new NormalizeOp(metadata.get{{NAME_U}}Mean(), metadata.get{{NAME_U}}Stddev())))");
    }
    // Quantize + cast are always appended; this Append also terminates the
    // builder chain with ';'.
    code_writer->Append(
        R"(    .add(new QuantizeOp(
        metadata.get{{NAME_U}}QuantizationParams().getZeroPoint(),
        metadata.get{{NAME_U}}QuantizationParams().getScale()))
    .add(new CastOp(metadata.get{{NAME_U}}Type()));
return builder.build();)");
  }
  // buildDefault{{NAME_U}}Postprocessor(): dequantize -> normalize (optional).
  // AppendNoNewLine is used so the optional op and the closing ';' splice onto
  // the same builder chain without breaking the generated Java formatting.
  for (const auto& tensor : model.outputs) {
    code_writer->NewLine();
    SetCodeWriterWithTensorInfo(code_writer, tensor);
    auto block = AsBlock(
        code_writer,
        "private {{PROCESSOR_TYPE}} buildDefault{{NAME_U}}Postprocessor()");
    code_writer->AppendNoNewLine(
        R"({{PROCESSOR_TYPE}}.Builder builder = new {{PROCESSOR_TYPE}}.Builder()
    .add(new DequantizeOp(
        metadata.get{{NAME_U}}QuantizationParams().getZeroPoint(),
        metadata.get{{NAME_U}}QuantizationParams().getScale())))");
    if (tensor.normalization_unit >= 0) {
      code_writer->AppendNoNewLine(R"(
    .add(new NormalizeOp(metadata.get{{NAME_U}}Mean(), metadata.get{{NAME_U}}Stddev())))");
    }
    code_writer->Append(R"(;
return builder.build();)");
  }
  code_writer->NewLine();
  {
    // preprocessInputs(...): run each input through its preprocessor and
    // collect the resulting buffers into the Object[] fed to model.run().
    // `param_list_gen` accumulates the comma-separated buffer list.
    const auto block =
        AsBlock(code_writer,
                "private Object[] preprocessInputs({{INPUT_TYPE_PARAM_LIST}})");
    CodeWriter param_list_gen(err);
    for (const auto& tensor : model.inputs) {
      SetCodeWriterWithTensorInfo(code_writer, tensor);
      code_writer->Append("{{NAME}} = {{NAME}}Preprocessor.process({{NAME}});");
      SetCodeWriterWithTensorInfo(&param_list_gen, tensor);
      param_list_gen.AppendNoNewLine("{{NAME}}.getBuffer(), ");
    }
    // Drop the trailing ", " separator.
    // NOTE(review): assumes model.inputs is non-empty — confirm that callers
    // validate this before codegen, else Backspace(2) runs on an empty buffer.
    param_list_gen.Backspace(2);
    code_writer->AppendNoNewLine("return new Object[] {");
    code_writer->AppendNoNewLine(param_list_gen.ToString());
    code_writer->Append("};");
  }
  return true;
}