std::shared_ptr<torch::jit::script::Module> load_model_from_path(std::istream &, const std::vector<std::string> &, const torch::Device &)

in source/neuropod/backends/torchscript/torch_backend.cc [65:119]


// Loads a TorchScript model from a stream onto `device`, after loading any
// user-supplied custom op libraries. Returns a shared_ptr to the loaded module.
std::shared_ptr<torch::jit::script::Module> load_model_from_path(std::istream &                  graph_stream,
                                                                 const std::vector<std::string> &custom_op_paths,
                                                                 const torch::Device &           device)
{
    // Load custom ops
    // TODO(vip): Add a flag allowing users to opt out of loading custom ops

#ifndef __APPLE__
// Custom op libraries need to resolve symbols from torch at load time.
// Since this binary already links `libtorch.so`, the dlopen below does not
// load anything new — RTLD_NOLOAD just re-opens the existing mapping with
// RTLD_GLOBAL so its symbols become visible to subsequently loaded libraries.
#if CAFFE2_NIGHTLY_VERSION >= 20190601
    void *torch_handle = dlopen("libtorch.so", RTLD_NOW | RTLD_GLOBAL | RTLD_NOLOAD);
#else
    // Older torch releases shipped a versioned soname
    void *torch_handle = dlopen("libtorch.so.1", RTLD_NOW | RTLD_GLOBAL | RTLD_NOLOAD);
#endif

    if (torch_handle == nullptr)
    {
        const auto err = dlerror();
        if (err != nullptr)
        {
            NEUROPOD_ERROR("Failed to promote libtorch to RTLD_GLOBAL. Error from dlopen: {}", err);
        }
        else
        {
            // dlopen failed but dlerror had nothing — most likely libtorch was
            // never loaded, which points at a build/link problem in the backend.
            NEUROPOD_ERROR("Failed to promote libtorch to RTLD_GLOBAL; this likely means the neuropod backend library "
                           "was not built correctly");
        }
    }
#endif

    // Load each custom op library; the handles are deliberately kept open for
    // the lifetime of the process so the registered ops remain available.
    for (const auto &path : custom_op_paths)
    {
        void *op_handle = dlopen(path.c_str(), RTLD_NOW);
        if (op_handle != nullptr)
        {
            continue;
        }

        const auto err = dlerror();
        if (err != nullptr)
        {
            NEUROPOD_ERROR("Failed to load custom op. Error from dlopen: {}", err);
        }
        else
        {
            NEUROPOD_ERROR("Failed to load custom op. dlopen failed but no error was available");
        }
    }

    // Newer torch returns a Module by value from torch::jit::load; wrap it in a
    // shared_ptr to match the older API, which already returned a shared_ptr.
#if CAFFE2_NIGHTLY_VERSION >= 20190717
    auto model = std::make_shared<torch::jit::script::Module>(torch::jit::load(graph_stream, device));
#else
    auto model = torch::jit::load(graph_stream, device);
#endif
    return model;
}