in supporting-blog-content/using-openelm-models/OpenELM/generate_openelm.py [0:0]
def openelm_generate_parser():
    """Parse command-line arguments for the OpenELM generate module.

    Returns:
        argparse.Namespace: Parsed arguments with attributes ``model``,
        ``hf_access_token``, ``prompt``, ``device``, ``max_length``,
        ``assistant_model`` and ``generate_kwargs`` (a ``dict`` built from
        ``key=value`` pairs, or ``None`` when the flag is absent).
    """

    class KwargsParser(argparse.Action):
        """Parser action class to parse kwargs of form key=value."""

        def __call__(self, parser, namespace, values, option_string=None):
            setattr(namespace, self.dest, dict())
            for val in values:
                if "=" not in val:
                    raise ValueError(
                        (
                            "Argument parsing error, kwargs are expected in"
                            " the form of key=value."
                        )
                    )
                # Split on the FIRST "=" only, so values may themselves
                # contain "=" (e.g. stop=a=b).  A bare split("=") raised
                # "too many values to unpack" on such inputs.
                kwarg_k, kwarg_v = val.split("=", 1)
                # Coerce the value: int first, then float, else keep str.
                try:
                    converted_v = int(kwarg_v)
                except ValueError:
                    try:
                        converted_v = float(kwarg_v)
                    except ValueError:
                        converted_v = kwarg_v
                getattr(namespace, self.dest)[kwarg_k] = converted_v

    parser = argparse.ArgumentParser("OpenELM Generate Module")
    parser.add_argument(
        "--model",
        dest="model",
        help="Path to the hf converted model.",
        required=True,
        type=str,
    )
    parser.add_argument(
        "--hf_access_token",
        dest="hf_access_token",
        help='Hugging face access token, starting with "hf_".',
        type=str,
    )
    parser.add_argument(
        "--prompt",
        dest="prompt",
        help="Prompt for LLM call.",
        default="",
        type=str,
    )
    parser.add_argument(
        "--device",
        dest="device",
        help="Device used for inference.",
        type=str,
    )
    parser.add_argument(
        "--max_length",
        dest="max_length",
        help="Maximum length of tokens.",
        default=256,
        type=int,
    )
    parser.add_argument(
        "--assistant_model",
        dest="assistant_model",
        help=(
            (
                "If set, this is used as a draft model "
                "for assisted speculative generation."
            )
        ),
        type=str,
    )
    parser.add_argument(
        "--generate_kwargs",
        dest="generate_kwargs",
        help="Additional kwargs passed to the HF generate function.",
        type=str,
        nargs="*",
        action=KwargsParser,
    )
    return parser.parse_args()