adanet/core/estimator.py [1098:1111]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    if not checkpoint_path:
      checkpoint_path = tf.train.latest_checkpoint(self.model_dir)
    logging.info("Exporting SavedModel for AdaNet model at checkpoint: %s",
                 checkpoint_path)
    # Delegate exporting to a temporary estimator instead of super to make
    # passing arguments more functional (via params).
    temp_estimator = self._create_temp_estimator(
        config=self.config,
        best_ensemble_index=self._compute_best_ensemble_index(
            checkpoint_path, hooks=hooks),
        checkpoint_path=checkpoint_path,
        hooks=hooks,
        is_export=True)
    with self._force_replication_strategy():
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
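
Both ranges open with the same checkpoint fallback: when the caller passes no explicit checkpoint_path, the newest checkpoint under model_dir is used. A minimal standalone sketch of that fallback, assuming only tf.train.latest_checkpoint as in the excerpt (the helper name resolve_checkpoint_path is ours for illustration, not AdaNet's):

import tensorflow as tf


def resolve_checkpoint_path(model_dir, checkpoint_path=None):
  # Mirror the fallback in the excerpt: when no explicit path is given,
  # use the newest checkpoint in model_dir (e.g. "model.ckpt-10000").
  # tf.train.latest_checkpoint returns None if no checkpoint exists yet,
  # so callers should be prepared to handle that case.
  if not checkpoint_path:
    checkpoint_path = tf.train.latest_checkpoint(model_dir)
  return checkpoint_path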



adanet/core/estimator.py [1127:1140]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    if not checkpoint_path:
      checkpoint_path = tf.train.latest_checkpoint(self.model_dir)
    logging.info("Exporting SavedModel for AdaNet model at checkpoint: %s",
                 checkpoint_path)
    # Delegate exporting to a temporary estimator instead of super to make
    # passing arguments more functional (via params).
    temp_estimator = self._create_temp_estimator(
        config=self.config,
        best_ensemble_index=self._compute_best_ensemble_index(
            checkpoint_path, hooks=hooks),
        checkpoint_path=checkpoint_path,
        hooks=hooks,
        is_export=True)
    with self._force_replication_strategy():
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
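
The two ranges are byte-for-byte identical, which is why they are reported together: adanet/core/estimator.py repeats the same export setup at [1098:1111] and [1127:1140]. The usual remedy is to hoist the shared lines into one private helper that both export methods call before entering their own with self._force_replication_strategy(): blocks. A sketch under that assumption (the helper name _make_export_estimator is hypothetical; _create_temp_estimator, _compute_best_ensemble_index, and self.config come from the excerpts, and logging is assumed to be absl.logging based on the excerpt's logging.info call):

from absl import logging  # assumed import; the excerpt's logging.info matches absl's API
import tensorflow as tf


# Inside the AdaNet Estimator class (a sketch, not the project's actual code):
def _make_export_estimator(self, checkpoint_path, hooks):
  """Shared setup for the duplicated export paths (hypothetical helper)."""
  if not checkpoint_path:
    checkpoint_path = tf.train.latest_checkpoint(self.model_dir)
  logging.info("Exporting SavedModel for AdaNet model at checkpoint: %s",
               checkpoint_path)
  # Delegate exporting to a temporary estimator instead of super to make
  # passing arguments more functional (via params).
  return self._create_temp_estimator(
      config=self.config,
      best_ensemble_index=self._compute_best_ensemble_index(
          checkpoint_path, hooks=hooks),
      checkpoint_path=checkpoint_path,
      hooks=hooks,
      is_export=True)

Each duplicated range then collapses to:

    temp_estimator = self._make_export_estimator(checkpoint_path, hooks)
    with self._force_replication_strategy():
      ...  # the method-specific export call on temp_estimator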



