def getEpochs: Int = $()

in sagemaker-spark-sdk/src/main/scala/com/amazonaws/services/sagemaker/sparksdk/algorithms/LinearLearnerSageMakerEstimator.scala [149:509]


  /** Gets the current value of the `epochs` param. */
  def getEpochs: Int = $(epochs)

  /**
    * Whether training is for binary classification, multiclass classification, or regression.
    * Supported options: "binary_classifier", "multiclass_classifier", and "regressor".
    * Required
    */
  private[algorithms] val predictorType : Param[String] = new Param(this, "predictor_type",
    "Whether training is for binary classification, multiclass classification or regression. " +
    "Supported options: 'binary_classifier', 'multiclass_classifier', and 'regressor'.",
    ParamValidators.inArray(Array("binary_classifier", "multiclass_classifier", "regressor")))

  /**
    * Whether model should include bias.
    * Valid values: the strings "True" or "False"; the getter parses them to a Boolean.
    * Default: "True".
    */
  val useBias : Param[String] = new Param(this, "use_bias",
    "Whether model should include bias. ",
    ParamValidators.inArray(Array("True", "False")))
  /** Gets the `useBias` param parsed to a Boolean. */
  def getUseBias: Boolean = parseTrueAndFalse(useBias)

  /**
    * Number of models to train in parallel. Must be > 0 or "auto".
    * If default "auto" is selected, the number of parallel models to train will be decided by
    * the algorithm itself.
    * Kept as a String param because "auto" is a valid value alongside positive integers.
    * Default: "auto".
    */
  val numModels : Param[String] = new Param(this, "num_models",
    "Number of models to train in parallel. Must be > 0 or 'auto'",
    autoOrAboveParamValidator(0, false))
  /** Gets the current value of the `numModels` param (an integer string or "auto"). */
  def getNumModels: String = $(numModels)

  /**
    * Number of samples to use from validation dataset for doing model calibration
    * (finding the best threshold). Must be > 0.
    * Default: 10000000.
    */
  val numCalibrationSamples : IntParam = new IntParam(this, "num_calibration_samples",
    "Number of samples to use from validation dataset for doing model calibration" +
      " (finding the best threshold). Must be > 0.",
    ParamValidators.gt(0))
  /** Gets the current value of the `numCalibrationSamples` param. */
  def getNumCalibrationSamples: Int = $(numCalibrationSamples)

  /**
    * Initialization function for the model weights. Supported options: "uniform" and "normal".
    * uniform: uniformly between (-scale, +scale), see [[initScale]]
    * normal: normal with mean 0 and sigma, see [[initSigma]]
    * Default: "uniform".
    */
  val initMethod : Param[String] = new Param(this, "init_method",
    "Initialization function for the model weights. Supported options: 'uniform' and 'normal'.",
    ParamValidators.inArray(Array("uniform", "normal")))
  /** Gets the current value of the `initMethod` param. */
  def getInitMethod: String = $(initMethod)

  /**
    * Scale for init method uniform. Must be > 0.
    * Only used when [[initMethod]] is "uniform".
    * Default: 0.07.
    */
  val initScale : DoubleParam = new DoubleParam(this, "init_scale",
    "Scale for init method uniform. Must be > 0.",
    ParamValidators.gt(0))
  /** Gets the current value of the `initScale` param. */
  def getInitScale: Double = $(initScale)

  /**
    * Standard deviation for init method normal. Must be > 0.
    * Only used when [[initMethod]] is "normal".
    * Default: 0.01.
    */
  val initSigma : DoubleParam = new DoubleParam(this, "init_sigma",
    "Standard deviation for init method normal. Must be > 0.",
    ParamValidators.gt(0))
  /** Gets the current value of the `initSigma` param. */
  def getInitSigma: Double = $(initSigma)

  /**
    * Initial weight for bias. Accepts any number (no validator is attached).
    * Default: 0.
    */
  val initBias : DoubleParam = new DoubleParam(this, "init_bias",
    "Initial weight for bias. Must be a number.")
  /** Gets the current value of the `initBias` param. */
  def getInitBias: Double = $(initBias)

  /**
    * Which optimizer is to be used. Supported options: "sgd", "adam", "rmsprop" and "auto".
    * Default: "auto".
    */
  val optimizer : Param[String] = new Param(this, "optimizer", "Which optimizer is to be used. " +
    "Supported options: 'sgd', 'adam', 'rmsprop' and 'auto'.",
    ParamValidators.inArray(Array("sgd", "adam", "rmsprop", "auto")))
  /** Gets the current value of the `optimizer` param. */
  def getOptimizer: String = $(optimizer)

  /**
    * The loss function to apply. Supported options: "logistic", "squared_loss", "absolute_loss",
    * "hinge_loss", "eps_insensitive_squared_loss", "eps_insensitive_absolute_loss",
    * "quantile_loss", "huber_loss", "softmax_loss" and "auto".
    * Default: "auto".
    */
  val loss : Param[String] = new Param(this, "loss", "The loss function to apply. " +
    "Supported options: 'logistic', 'squared_loss', 'absolute_loss', 'hinge_loss', " +
    "'eps_insensitive_squared_loss', 'eps_insensitive_absolute_loss', 'quantile_loss', " +
    "'huber_loss', 'softmax_loss' and 'auto'.",
    ParamValidators.inArray(Array("logistic", "squared_loss", "absolute_loss", "hinge_loss",
      "eps_insensitive_squared_loss", "eps_insensitive_absolute_loss", "quantile_loss",
      "huber_loss", "softmax_loss", "auto")))
  /** Gets the current value of the `loss` param. */
  def getLoss: String = $(loss)

  /**
    * The L2 regularization, i.e. the weight decay parameter. Use 0 for no L2 regularization.
    * Must be >= 0.
    * Default: 0.
    */
  val wd : DoubleParam = new DoubleParam(this, "wd",
    "The L2 regularization, i.e. the weight decay parameter. Must be >= 0.",
    ParamValidators.gtEq(0))
  /** Gets the current value of the `wd` (weight decay / L2 regularization) param. */
  def getWd: Double = $(wd)

  /**
    * The L1 regularization parameter. Use 0 for no L1 regularization. Must be >= 0.
    * Default: 0.
    */
  val l1 : DoubleParam = new DoubleParam(this, "l1",
    "The L1 regularization parameter. Use 0 for no L1 regularization. Must be >= 0.",
    ParamValidators.gtEq(0))
  /** Gets the current value of the `l1` regularization param. */
  def getL1: Double = $(l1)

  /**
    * Momentum parameter of sgd optimizer. Must be in range [0, 1).
    * Default: 0.
    */
  val momentum : DoubleParam = new DoubleParam(this, "momentum",
    "Momentum parameter of sgd optimizer. Must be in range [0, 1).",
    // lowerInclusive = true, upperInclusive = false, i.e. [0, 1)
    ParamValidators.inRange(0.0, 1.0, true, false))
  /** Gets the current value of the `momentum` param. */
  def getMomentum: Double = $(momentum)

  /**
    * The learning rate. Must be > 0 or "auto".
    * Kept as a String param because "auto" is a valid value alongside positive numbers.
    * Default: "auto".
    */
  val learningRate : Param[String] = new Param(this, "learning_rate",
    "The learning rate. Must be > 0 or 'auto'",
    autoOrAboveParamValidator(0, false))
  /** Gets the current value of the `learningRate` param (a numeric string or "auto"). */
  def getLearningRate: String = $(learningRate)

  /**
    * Parameter specific to adam optimizer. Exponential decay rate for first moment estimates.
    * Ignored when optimizer is not adam. Must be in range [0, 1).
    * Default: 0.9.
    */
  val beta1 : DoubleParam = new DoubleParam(this, "beta_1",
    "Parameter specific to adam optimizer. Exponential decay rate for first moment estimates. " +
      "Ignored when optimizer is not adam. Must be in range [0, 1).",
    ParamValidators.inRange(0.0, 1.0, true, false))
  /** Gets the current value of the `beta1` param. */
  def getBeta1: Double = $(beta1)

  /**
    * Parameter specific to adam optimizer. Exponential decay rate for second moment estimates.
    * Ignored when optimizer is not adam. Must be in range [0, 1).
    * Default: 0.999.
    */
  val beta2 : DoubleParam = new DoubleParam(this, "beta_2",
    "Parameter specific to adam optimizer. exponential decay rate for second moment estimates. " +
      "Ignored when optimizer is not adam. Must be in range [0, 1).",
    ParamValidators.inRange(0.0, 1.0, true, false))
  /** Gets the current value of the `beta2` param. */
  def getBeta2: Double = $(beta2)

  /**
    * Learning rate bias multiplier.
    * The actual learning rate for the bias is learning rate times bias_lr_mult. Must be > 0.
    * Default: 10.
    */
  val biasLrMult : DoubleParam = new DoubleParam(this, "bias_lr_mult",
    "Learning rate bias multiplier. " +
      "The actual learning rate for the bias is learning rate times bias_lr_mult. " +
      "Must be > 0.", ParamValidators.gt(0))
  /** Gets the current value of the `biasLrMult` param. */
  def getBiasLrMult: Double = $(biasLrMult)

  /**
    * Weight decay parameter multiplier.
    * The actual L2 regularization weight for the bias is wd times bias_wd_mult. Must be >= 0.
    * Default: 0.
    */
  val biasWdMult : DoubleParam = new DoubleParam(this, "bias_wd_mult",
    "Weight decay parameter multiplier. " +
      "The actual L2 regularization weight for the bias is wd times bias_wd_mult. " +
      "Must be >= 0.", ParamValidators.gtEq(0))
  /** Gets the current value of the `biasWdMult` param. */
  def getBiasWdMult: Double = $(biasWdMult)

  /**
    * Whether to use a scheduler for the learning rate.
    * Valid values: the strings "True" or "False"; the getter parses them to a Boolean.
    * Default: True
    */
  val useLrScheduler : Param[String] = new Param(this, "use_lr_scheduler",
    "Whether to use a scheduler for the learning rate. ",
    ParamValidators.inArray(Array("True", "False")))
  /** Gets the `useLrScheduler` param parsed to a Boolean. */
  def getUseLrScheduler: Boolean = parseTrueAndFalse(useLrScheduler)

  /**
    * Parameter specific to lr_scheduler. Ignored otherwise.
    * The number of steps between decreases of the learning rate. Must be > 0.
    * Default: 100.
    */
  val lrSchedulerStep : IntParam = new IntParam(this, "lr_scheduler_step",
    "Parameter specific to lr_scheduler. Ignored otherwise. " +
      "The number of steps between decreases of the learning rate. " +
      "Must be > 0.", ParamValidators.gt(0))
  /** Gets the current value of the `lrSchedulerStep` param. */
  def getLrSchedulerStep: Int = $(lrSchedulerStep)

  /**
    * Parameter specific to lr_scheduler. Ignored otherwise.
    * Every lr_scheduler_step the learning rate will decrease by this quantity. Must be in (0, 1).
    * Default: 0.99.
    */
  val lrSchedulerFactor : DoubleParam = new DoubleParam(this, "lr_scheduler_factor",
    "Parameter specific to lr_scheduler. Ignored otherwise. " +
      "Every lr_scheduler_step the learning rate will decrease by this quantity. " +
      "Must be in (0, 1).", ParamValidators.inRange(0, 1, false, false))
  /** Gets the current value of the `lrSchedulerFactor` param. */
  def getLrSchedulerFactor: Double = $(lrSchedulerFactor)

  /**
    * Parameter specific to lr_scheduler. Ignored otherwise.
    * The learning rate will never decrease to a value lower than lr_scheduler_minimum_lr.
    * Must be > 0.
    * Default: 1e-5.
    */
  val lrSchedulerMinimumLr : DoubleParam = new DoubleParam(this, "lr_scheduler_minimum_lr",
    "Parameter specific to lr_scheduler. Ignored otherwise. " +
      "The learning rate will never decrease to a value lower than lr_scheduler_minimum_lr. " +
      "Must be > 0.", ParamValidators.gt(0))
  /** Gets the current value of the `lrSchedulerMinimumLr` param. */
  def getLrSchedulerMinimumLr: Double = $(lrSchedulerMinimumLr)

  /**
    * Whether to normalize the features before training to have std_dev of 1.
    * Valid values: the strings "True" or "False"; the getter parses them to a Boolean.
    * Default: True
    */
  val normalizeData : Param[String] = new Param(this, "normalize_data",
    "Whether to normalize the features before training to have std_dev of 1. ",
    ParamValidators.inArray(Array("True", "False")))
  /** Gets the `normalizeData` param parsed to a Boolean. */
  def getNormalizeData: Boolean = parseTrueAndFalse(normalizeData)

  /**
    * Whether regression label is normalized. Ignored in classification.
    * NOTE(review): the documented default is "auto", but the validator below only accepts
    * "True"/"False" as explicit values — confirm whether "auto" should also be accepted.
    * Default: "auto"
    */
  val normalizeLabel : Param[String] = new Param(this, "normalize_label",
    "Whether regression label is normalized. If set for classification, it will be ignored.",
    ParamValidators.inArray(Array("True", "False")))
  /** Gets the `normalizeLabel` param parsed to a Boolean. */
  def getNormalizeLabel: Boolean = parseTrueAndFalse(normalizeLabel)

  /**
    * Whether to unbias the features before training so that mean is 0.
    * By default data is unbiased if use_bias is set to true.
    * NOTE(review): the documented default is "auto", but the validator below only accepts
    * "True"/"False" as explicit values — confirm whether "auto" should also be accepted.
    * Default: "auto"
    */
  val unbiasData : Param[String] = new Param(this, "unbias_data",
    "Whether to unbias the features before training so that mean is 0. " +
      "By default data is unbiased if use_bias is set to true.",
    ParamValidators.inArray(Array("True", "False")))
  /** Gets the `unbiasData` param parsed to a Boolean. */
  def getUnbiasData: Boolean = parseTrueAndFalse(unbiasData)

  /**
    * Whether to unbias the labels before training so that mean is 0.
    * Only done for regression if use_bias is true. Otherwise will be ignored.
    * Default: "auto"
    */
  val unbiasLabel : Param[String] = new Param(this, "unbias_label",
    "Whether to unbias the labels before training so that mean is 0. " +
      "Only done for regression if use_bias is true. Otherwise will be ignored.",
    ParamValidators.inArray(Array("True", "False")))
  /** Gets the `unbiasLabel` param parsed to a Boolean. */
  def getUnbiasLabel: Boolean = parseTrueAndFalse(unbiasLabel)

  /**
    * Number of data points to use for calculating the normalizing / unbiasing terms. Must be > 0.
    * Default: 10000.
    */
  val numPointForScaler : IntParam = new IntParam(this, "num_point_for_scaler",
    "Number of data points to use for calculating the normalizing / unbiasing terms. " +
      "Must be > 0.", ParamValidators.gt(0))
  /** Gets the current value of the `numPointForScaler` param. */
  def getNumPointForScaler: Int = $(numPointForScaler)

  /**
    * The number of epochs to wait before ending training if no improvement is made in the relevant
    * metric. The metric is the binary_classifier_model_selection_criteria if provided, otherwise
    * the metric is the same as loss. The metric is evaluated on the validation data. If no
    * validation data is provided, the metric is always the same as loss and is evaluated on the
    * training data. To disable early stopping, set early_stopping_patience to a value larger than
    * epochs. Must be > 0.
    * Default: 3.
    */
  val earlyStoppingPatience : IntParam = new IntParam(this, "early_stopping_patience",
    "The number of epochs to wait before ending training if no improvement is made in the " +
      "relevant metric. The metric is the binary_classifier_model_selection_criteria if " +
      "provided, otherwise the metric is the same as loss. The metric is evaluated on the " +
      "validation data. If no validation data is provided, the metric is always the same as " +
      "loss and is evaluated on the training data. To disable early stopping, set " +
      "early_stopping_patience to a value larger than epochs. Must be > 0.",
    ParamValidators.gt(0))
  /** Gets the current value of the `earlyStoppingPatience` param. */
  def getEarlyStoppingPatience: Int = $(earlyStoppingPatience)

  /**
    * Relative tolerance to measure an improvement in loss. If the ratio of the improvement in loss
    * divided by the previous best loss is smaller than this value, early stopping will consider
    * the improvement to be zero. Must be > 0.
    * Default: 0.001.
    */
  val earlyStoppingTolerance : DoubleParam = new DoubleParam(this, "early_stopping_tolerance",
    "Relative tolerance to measure an improvement in loss. If the ratio of the improvement in " +
      "loss divided by the previous best loss is smaller than this value, early stopping will " +
      "consider the improvement to be zero. Must be > 0.", ParamValidators.gt(0))
  /** Gets the current value of the `earlyStoppingTolerance` param. */
  def getEarlyStoppingTolerance: Double = $(earlyStoppingTolerance)

  /**
    * Margin for hinge_loss. Must be > 0.
    * Only relevant when [[loss]] is "hinge_loss".
    * Default: 1.0.
    */
  val margin : DoubleParam = new DoubleParam(this, "margin",
    "Margin for hinge_loss. Must be > 0.", ParamValidators.gt(0))
  /** Gets the current value of the `margin` param. */
  def getMargin: Double = $(margin)

  /**
    * Quantile for quantile loss. For quantile q, the model will attempt to produce predictions
    * such that true_label < prediction with probability q. Must be in (0, 1).
    * Default: 0.5.
    */
  val quantile : DoubleParam = new DoubleParam(this, "quantile",
    "Quantile for quantile loss. For quantile q, the model will attempt to produce predictions " +
      "such that true_label < prediction with probability q. " +
      "Must be in (0, 1).", ParamValidators.inRange(0, 1, false, false))
  /** Gets the current value of the `quantile` param. */
  def getQuantile: Double = $(quantile)

  /**
    * Parameter for epsilon insensitive loss type. During training and metric evaluation,
    * any error smaller than this is considered to be zero. Must be > 0.
    * Default: 0.01.
    */
  val lossInsensitivity : DoubleParam = new DoubleParam(this, "loss_insensitivity",
    "Parameter for epsilon insensitive loss type. During training and metric evaluation, " +
      "any error smaller than this is considered to be zero. Must be > 0.", ParamValidators.gt(0))
  /** Gets the current value of the `lossInsensitivity` param. */
  def getLossInsensitivity: Double = $(lossInsensitivity)

  /**
    * Parameter for Huber loss. During training and metric evaluation, compute L2 loss for errors
    * smaller than delta and L1 loss for errors larger than delta. Must be > 0.
    * Default: 1.0.
    */
  val huberDelta : DoubleParam = new DoubleParam(this, "huber_delta",
    "Parameter for Huber loss. During training and metric evaluation, compute L2 loss for " +
      "errors smaller than delta and L1 loss for errors larger than delta. " +
      "Must be > 0.", ParamValidators.gt(0))
  /** Gets the current value of the `huberDelta` param. */
  def getHuberDelta: Double = $(huberDelta)

  /**
    * The value of beta to use when calculating F score metrics for binary or multiclass
    * classification. Also used if binary_classifier_model_selection_criteria is f_beta.
    * Must be > 0.
    * Default: 1.0.
    */
  val fBeta : DoubleParam = new DoubleParam(this, "f_beta",
    "The value of beta to use when calculating F score metrics for binary or multiclass " +
      "classification. Also used if binary_classifier_model_selection_criteria is f_beta. " +
      "Must be > 0.", ParamValidators.gt(0))
  /** Gets the current value of the `fBeta` param. */
  def getFBeta: Double = $(fBeta)
}

object LinearLearnerSageMakerEstimator {