static void loadAndForwardModel()

in NativeApp/app/src/main/cpp/pytorch_nativeapp.cpp [53:72]


static void loadAndForwardModel(JNIEnv *env, jclass, jstring jModelPath) {
  const char *modelPath = env->GetStringUTFChars(jModelPath, nullptr);
  assert(modelPath);

  // To load a TorchScript model in the mobile build we need to set these
  // guards, because the mobile build omits features like autograd to keep the
  // binary size small; in this example the guards are grouped in
  // `struct JITCallGuard`. This may change in the future; you can track the
  // latest state in
  // android/pytorch_android/src/main/cpp/pytorch_jni_jit.cpp
  JITCallGuard guard;
  torch::jit::Module module = torch::jit::load(modelPath);
  module.eval();
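  // Build two random example inputs with the shapes the scripted model
  // expects, run forward, and log both inputs and the output.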
  torch::Tensor x = torch::randn({4, 8});
  torch::Tensor y = torch::randn({8, 5});
  log("x:", x);
  log("y:", y);
  c10::IValue t_out = module.forward({x, y});
  log("result:", t_out);
  env->ReleaseStringUTFChars(jModelPath, modelPath);
}
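For context, the guards the comment refers to are typically bundled into a small RAII struct that is kept alive across both `torch::jit::load` and `forward`. The sketch below is modeled on pytorch_jni_jit.cpp rather than copied from this repository, and the exact guard types depend on the libtorch version being linked (newer releases expose `c10::InferenceMode`, which can replace the first two guards).

struct JITCallGuard {
  // Disable autograd bookkeeping; the mobile build ships without autograd.
  torch::autograd::AutoGradMode no_autograd_guard{false};
  // Dispatch through the non-variable (inference) path.
  torch::AutoNonVariableTypeMode non_var_guard{true};
  // Skip JIT graph optimizer passes that are not part of the mobile build.
  torch::jit::GraphOptimizerEnabledGuard no_optimizer_guard{false};
};

On the Java side the function is presumably exposed as a `native` method (for example `public static native void loadAndForwardModel(String modelPath);`) and bound to this C++ symbol either through `RegisterNatives` or the standard JNI naming convention.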