def loadstoremodel()

in runinferenceutil/infra.py [0:0]


import torch
from transformers import AutoModelForSeq2SeqLM


def loadstoremodel():
    state_dict_path = "saved_model"
    model_name = "google/flan-t5-base"
    # Load the pre-trained model from the Hugging Face Hub (or local cache)
    model = AutoModelForSeq2SeqLM.from_pretrained(
        model_name, torch_dtype=torch.bfloat16
    )
    # Save the model's state_dict to local disk
    torch.save(model.state_dict(), state_dict_path)
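
The saved state_dict contains only the weights, so the architecture must be rebuilt before the weights can be restored. Below is a minimal sketch of how a consumer might reload the saved file for inference; the helper name load_saved_model is hypothetical and not part of infra.py, and the paths mirror the values hard-coded in loadstoremodel().

import torch
from transformers import AutoModelForSeq2SeqLM


def load_saved_model(state_dict_path="saved_model", model_name="google/flan-t5-base"):
    # Rebuild the model architecture, then overwrite its weights with the saved state_dict
    model = AutoModelForSeq2SeqLM.from_pretrained(
        model_name, torch_dtype=torch.bfloat16
    )
    model.load_state_dict(torch.load(state_dict_path))
    model.eval()  # switch to inference mode
    return model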