# in src/sagemaker/model.py [0:0]
def _upload_code(self, key_prefix: str, repack: bool = False) -> None:
    """Uploads code to S3 to be used with script mode with SageMaker inference.

    Depending on the session mode and the ``repack`` flag this either uploads the
    entry point (plus ``source_dir``/``dependencies``) as a code tarball, defers
    repacking to pipeline runtime, or repacks code and model artifact into a single
    ``model.tar.gz``.

    Args:
        key_prefix (str): The S3 key associated with the ``code_location`` parameter of the
            ``Model`` class.
        repack (bool): Optional. Set to ``True`` to indicate that the source code and model
            artifact should be repackaged into a new S3 object. (default: False).
    """
    local_code = utils.get_config_value("local.local_code", self.sagemaker_session.config)
    bucket, key_prefix = s3.determine_bucket_and_prefix(
        bucket=self.bucket,
        key_prefix=key_prefix,
        sagemaker_session=self.sagemaker_session,
    )

    if (self.sagemaker_session.local_mode and local_code) or self.entry_point is None:
        # Local mode running local code, or no inference script at all:
        # there is nothing to upload.
        self.uploaded_code = None
    elif not repack:
        self.uploaded_code = fw_utils.tar_and_upload_dir(
            session=self.sagemaker_session.boto_session,
            bucket=bucket,
            s3_key_prefix=key_prefix,
            script=self.entry_point,
            directory=self.source_dir,
            dependencies=self.dependencies,
            kms_key=self.model_kms_key,
            settings=self.sagemaker_session.settings,
        )

    if repack and self.model_data is not None and self.entry_point is not None:
        if isinstance(self.model_data, dict):
            # NOTE(review): use the module-level logger (was root ``logging``)
            # so messages respect this module's logging configuration.
            logger.warning("ModelDataSource currently doesn't support model repacking")
            return
        if is_pipeline_variable(self.model_data):
            # Model artifact is not materialized yet; defer repacking to later
            # during pipeline execution (requires a PipelineSession + ModelStep).
            if not isinstance(self.sagemaker_session, PipelineSession):
                logger.warning(
                    "The model_data is a Pipeline variable of type %s, "
                    "which should be used under `PipelineSession` and "
                    "leverage `ModelStep` to create or register model. "
                    "Otherwise some functionalities e.g. "
                    "runtime repack may be missing. For more, see: "
                    "https://sagemaker.readthedocs.io/en/stable/"
                    "amazon_sagemaker_model_building_pipeline.html#model-step",
                    type(self.model_data),
                )
                return
            self.sagemaker_session.context.need_runtime_repack.add(id(self))
            self.sagemaker_session.context.runtime_repack_output_prefix = s3.s3_path_join(
                "s3://", bucket, key_prefix
            )
            # Add the uploaded_code and repacked_model_data to update the container env
            self.repacked_model_data = self.model_data
            self.uploaded_code = fw_utils.UploadedCode(
                s3_prefix=self.repacked_model_data,
                script_name=os.path.basename(self.entry_point),
            )
            return

        if local_code and self.model_data.startswith("file://"):
            # Local mode with a local artifact: repack in place, no S3 upload.
            repacked_model_data = self.model_data
        else:
            repacked_model_data = "s3://" + "/".join([bucket, key_prefix, "model.tar.gz"])
            self.uploaded_code = fw_utils.UploadedCode(
                s3_prefix=repacked_model_data,
                script_name=os.path.basename(self.entry_point),
            )

        logger.info(
            "Repacking model artifact (%s), script artifact "
            "(%s), and dependencies (%s) "
            "into single tar.gz file located at %s. "
            "This may take some time depending on model size...",
            self.model_data,
            self.source_dir,
            self.dependencies,
            repacked_model_data,
        )

        utils.repack_model(
            inference_script=self.entry_point,
            source_directory=self.source_dir,
            dependencies=self.dependencies,
            model_uri=self.model_data,
            repacked_model_uri=repacked_model_data,
            sagemaker_session=self.sagemaker_session,
            kms_key=self.model_kms_key,
        )

        self.repacked_model_data = repacked_model_data