in metaflow/cmd/develop/stub_generator.py [0:0]
def write_out(self):
    """Generate and write all stub (``.pyi``) files into ``self._output_dir``.

    Drains ``self._pending_modules`` as a worklist (each entry is a
    ``(module_alias, module_name)`` pair), generating one stub file per
    module. A few modules (the "current" module, the deployer module, and
    the top-level ``metaflow`` module) are deferred to a second,
    post-processing pass because their stubs depend on information
    gathered while processing everything else.

    Side effects:
      - creates ``self._output_dir`` (and any needed subdirectories);
      - writes ``py.typed`` and, optionally, ``generated_for.txt``;
      - writes one ``.pyi`` file per processed module, creating empty
        ``__init__.pyi`` files (header only) along the way;
      - mutates ``self._pending_modules`` and ``self._done_modules``.

    NOTE(review): relies on ``self._reset``/``self._get_module``/
    ``self._generate_stubs`` (not shown here) to populate
    ``self._current_module``, ``self._imports``, ``self._typing_imports``,
    ``self._typevars``, ``self._current_references`` and ``self._stubs``
    before each file is written.
    """
    out_dir = self._output_dir
    os.makedirs(out_dir, exist_ok=True)
    # Write out py.typed (pylance seems to require it even though it is not
    # required in PEP 561) as well as a file we will use to check the "version"
    # of the stubs -- this helps to inform the user if the stubs were generated
    # for another version of Metaflow.
    pathlib.Path(os.path.join(out_dir, "py.typed")).touch()
    if self._write_generated_for:
        # Record "<metaflow version> <generation timestamp>" so stale stubs
        # can be detected later.
        pathlib.Path(os.path.join(out_dir, "generated_for.txt")).write_text(
            "%s %s"
            % (self._mf_version, datetime.fromtimestamp(time.time()).isoformat())
        )
    # Modules deferred to a second pass (see docstring); processed only once
    # the main worklist is empty.
    post_process_modules = []
    is_post_processing = False
    while len(self._pending_modules) != 0 or len(post_process_modules) != 0:
        if is_post_processing or len(self._pending_modules) == 0:
            # Main worklist exhausted: switch (irreversibly) to the
            # post-processing phase and drain the deferred modules.
            is_post_processing = True
            module_alias, module_name = post_process_modules.pop(0)
        else:
            module_alias, module_name = self._pending_modules.pop(0)
        # Skip vendored stuff
        if module_alias.startswith("metaflow._vendor") or module_name.startswith(
            "metaflow._vendor"
        ):
            continue
        # We delay current module and deployer module to the end since they
        # depend on info we gather elsewhere
        if (
            module_alias
            in (
                METAFLOW_CURRENT_MODULE_NAME,
                METAFLOW_DEPLOYER_MODULE_NAME,
            )
            and len(self._pending_modules) != 0
        ):
            post_process_modules.append((module_alias, module_name))
            continue
        if module_alias in self._done_modules:
            continue
        self._done_modules.add(module_alias)
        # If not, we process the module
        self._reset()
        self._get_module(module_alias, module_name)
        if module_name == "metaflow" and not is_post_processing:
            # We will want to regenerate this at the end to take into account
            # any changes to the Deployer
            # NOTE(review): the done-set was keyed on module_alias above but
            # module_name is removed here -- this is only consistent if alias
            # and name coincide for the top-level "metaflow" module; confirm.
            post_process_modules.append((module_name, module_name))
            self._done_modules.remove(module_name)
            continue
        self._generate_stubs()
        if hasattr(self._current_module, "__path__"):
            # This is a package (so a directory) and we are dealing with
            # a __init__.pyi type of case
            dir_path = os.path.join(self._output_dir, *module_alias.split(".")[1:])
        else:
            # This is NOT a package so the original source file is not a __init__.py
            dir_path = os.path.join(
                self._output_dir, *module_alias.split(".")[1:-1]
            )
        # Stub file name is the source file name with an "i" appended
        # (foo.py -> foo.pyi).
        out_file = os.path.join(
            dir_path, os.path.basename(self._current_module.__file__) + "i"
        )
        width = 100  # column width passed to the header writer
        os.makedirs(os.path.dirname(out_file), exist_ok=True)
        # We want to make sure we always have a __init__.pyi in the directories
        # we are creating
        # (parts = path components of dir_path relative to the output root)
        parts = dir_path.split(os.sep)[len(self._output_dir.split(os.sep)) :]
        for i in range(1, len(parts) + 1):
            init_file_path = os.path.join(
                self._output_dir, *parts[:i], "__init__.pyi"
            )
            if not os.path.exists(init_file_path):
                # Header-only placeholder; never overwrite an existing one.
                with open(init_file_path, mode="w", encoding="utf-8") as f:
                    self._write_header(f, width)
        with open(out_file, mode="w", encoding="utf-8") as f:
            # File layout: header, __future__ import, plain imports,
            # TYPE_CHECKING-guarded imports, TypeVar/NewType declarations,
            # reference lines, then the generated stubs themselves.
            self._write_header(f, width)
            f.write("from __future__ import annotations\n\n")
            imported_typing = False
            for module in self._imports:
                f.write("import " + module + "\n")
                if module == "typing":
                    imported_typing = True
            if self._typing_imports:
                # Imports only needed for annotations go behind a
                # typing.TYPE_CHECKING guard to avoid runtime imports.
                if not imported_typing:
                    f.write("import typing\n")
                    imported_typing = True
                f.write("if typing.TYPE_CHECKING:\n")
                for module in self._typing_imports:
                    f.write(TAB + "import " + module + "\n")
            if self._typevars:
                if not imported_typing:
                    f.write("import typing\n")
                    imported_typing = True
                for type_name, type_var in self._typevars.items():
                    # Entries are either TypeVar or NewType objects; each
                    # serializes differently.
                    if isinstance(type_var, TypeVar):
                        f.write(
                            "%s = %s\n" % (type_name, type_var_to_str(type_var))
                        )
                    else:
                        f.write(
                            "%s = %s\n" % (type_name, new_type_to_str(type_var))
                        )
            f.write("\n")
            for import_line in self._current_references:
                f.write(import_line + "\n")
            f.write("\n")
            for stub in self._stubs:
                f.write(stub + "\n")
        if is_post_processing:
            # Don't consider any pending modules if we are post processing
            self._pending_modules.clear()