`__init__` method of `DistributedGradientBoostedTreesModel`

from tensorflow_decision_forests/keras/wrappers_pre_generated.py


  def __init__(
      self,
      task: Optional[TaskType] = core.Task.CLASSIFICATION,
      features: Optional[List[core.FeatureUsage]] = None,
      exclude_non_specified_features: Optional[bool] = False,
      preprocessing: Optional["tf.keras.models.Functional"] = None,
      postprocessing: Optional["tf.keras.models.Functional"] = None,
      ranking_group: Optional[str] = None,
      uplift_treatment: Optional[str] = None,
      temp_directory: Optional[str] = None,
      verbose: int = 1,
      hyperparameter_template: Optional[str] = None,
      advanced_arguments: Optional[AdvancedArguments] = None,
      num_threads: Optional[int] = None,
      name: Optional[str] = None,
      max_vocab_count: Optional[int] = 2000,
      try_resume_training: Optional[bool] = True,
      check_dataset: Optional[bool] = True,
      apply_link_function: Optional[bool] = True,
      force_numerical_discretization: Optional[bool] = False,
      max_depth: Optional[int] = 6,
      max_unique_values_for_discretized_numerical: Optional[int] = 16000,
      maximum_model_size_in_memory_in_bytes: Optional[float] = -1.0,
      maximum_training_duration_seconds: Optional[float] = -1.0,
      min_examples: Optional[int] = 5,
      num_candidate_attributes: Optional[int] = -1,
      num_candidate_attributes_ratio: Optional[float] = -1.0,
      num_trees: Optional[int] = 300,
      random_seed: Optional[int] = 123456,
      shrinkage: Optional[float] = 0.1,
      use_hessian_gain: Optional[bool] = False,
      worker_logs: Optional[bool] = True,
      explicit_args: Optional[Set[str]] = None):
    """Builds the model wrapper.

    Gathers the learner-specific hyper-parameters into a single dictionary,
    optionally overlays a predefined hyper-parameter template on top of them,
    and forwards the generic model arguments to the base-class constructor
    with the learner fixed to "DISTRIBUTED_GRADIENT_BOOSTED_TREES".
    """

    # Learner-specific hyper-parameters, keyed by their canonical names as
    # understood by the underlying learner.
    hyperparams = {
        "apply_link_function": apply_link_function,
        "force_numerical_discretization": force_numerical_discretization,
        "max_depth": max_depth,
        "max_unique_values_for_discretized_numerical":
            max_unique_values_for_discretized_numerical,
        "maximum_model_size_in_memory_in_bytes":
            maximum_model_size_in_memory_in_bytes,
        "maximum_training_duration_seconds":
            maximum_training_duration_seconds,
        "min_examples": min_examples,
        "num_candidate_attributes": num_candidate_attributes,
        "num_candidate_attributes_ratio": num_candidate_attributes_ratio,
        "num_trees": num_trees,
        "random_seed": random_seed,
        "shrinkage": shrinkage,
        "use_hessian_gain": use_hessian_gain,
        "worker_logs": worker_logs,
    }

    # A named template overrides the defaults above, except for the
    # parameters the caller set explicitly (tracked in `explicit_args`).
    if hyperparameter_template is not None:
      hyperparams = core._apply_hp_template(
          hyperparams,
          hyperparameter_template,
          self.predefined_hyperparameters(),
          explicit_args)

    # Generic (non-learner-specific) arguments are handled by the base class.
    super(DistributedGradientBoostedTreesModel, self).__init__(
        task=task,
        learner="DISTRIBUTED_GRADIENT_BOOSTED_TREES",
        learner_params=hyperparams,
        features=features,
        exclude_non_specified_features=exclude_non_specified_features,
        preprocessing=preprocessing,
        postprocessing=postprocessing,
        ranking_group=ranking_group,
        uplift_treatment=uplift_treatment,
        temp_directory=temp_directory,
        verbose=verbose,
        advanced_arguments=advanced_arguments,
        num_threads=num_threads,
        name=name,
        max_vocab_count=max_vocab_count,
        try_resume_training=try_resume_training,
        check_dataset=check_dataset)