in src/sagemaker/tuner.py [0:0]
def _attach_with_training_details_list(cls, sagemaker_session, estimator_cls, job_details):
    """Create a HyperparameterTuner bound to an existing hyperparameter tuning job.

    The tuning job has the ``TrainingJobDefinitions`` field set in this case.
    """
    definitions = job_details["TrainingJobDefinitions"]

    # Ensure the user-supplied estimator_cls mapping (if any) only references
    # definition names that actually exist on the tuning job.
    estimator_names = sorted(d["DefinitionName"] for d in definitions)
    cls._validate_dict_argument(
        name="estimator_cls", value=estimator_cls, allowed_keys=estimator_names
    )

    estimator_dict = {}
    objective_metric_name_dict = {}
    hyperparameter_ranges_dict = {}
    metric_definitions_dict = {}

    for definition in definitions:
        name = definition["DefinitionName"]
        ranges = definition["HyperParameterRanges"]

        estimator_dict[name] = cls._prepare_estimator(
            estimator_cls=estimator_cls.get(name) if estimator_cls else None,
            training_details=definition,
            parameter_ranges=ranges,
            sagemaker_session=sagemaker_session,
        )
        objective_metric_name_dict[name] = definition["TuningObjective"]["MetricName"]
        hyperparameter_ranges_dict[name] = (
            cls._prepare_parameter_ranges_from_job_description(ranges)
        )

        # Metric definitions are optional on a training definition; only record
        # an entry when the describe output actually carries one.
        metric_definitions = definition["AlgorithmSpecification"].get(
            "MetricDefinitions", None
        )
        if metric_definitions is not None:
            metric_definitions_dict[name] = metric_definitions

    init_params = cls._prepare_init_params_from_job_description(job_details)
    return HyperparameterTuner.create(
        estimator_dict=estimator_dict,
        objective_metric_name_dict=objective_metric_name_dict,
        hyperparameter_ranges_dict=hyperparameter_ranges_dict,
        metric_definitions_dict=metric_definitions_dict,
        **init_params,
    )