// Excerpt of FactorizationMachinesSageMakerEstimator (param definitions), from
// sagemaker-spark-sdk/src/main/scala/com/amazonaws/services/sagemaker/sparksdk/algorithms/
// FactorizationMachinesSageMakerEstimator.scala, lines 47-272.


  /** Returns the current value of the `numFactors` param (declared above this excerpt). */
  def getNumFactors: Int = $(numFactors)

  /**
    * Selects the training objective: binary classification or regression.
    * Supported options: "binary_classifier", and "regressor".
    * Required
    */
  private[algorithms] val predictorType: Param[String] = new Param(this, "predictor_type",
    "Whether training is for binary classification or regression. Supported options: " +
      "'binary_classifier', and 'regressor'.",
    ParamValidators.inArray(Array("binary_classifier", "regressor")))

  /**
    * The number of passes over the training data. Must be a positive integer.
    * Default: 1
    */
  val epochs: IntParam = new IntParam(this, "epochs",
    "Number of training epochs to run. Must be > 0.", ParamValidators.gt(0))
  def getEpochs: Int = $(epochs)

  /**
    * Gradient clipping threshold: each gradient is projected onto the
    * interval [-clip_gradient, +clip_gradient] before the update.
    */
  val clipGradient: DoubleParam = new DoubleParam(this, "clip_gradient",
    "Clip the gradient by projecting onto the box [-clip_gradient, +clip_gradient].")
  def getClipGradient: Double = $(clipGradient)

  /**
    * Epsilon term used to guard against division by zero.
    */
  val eps: DoubleParam = new DoubleParam(this, "eps", "Small value to avoid division by 0.")
  def getEps: Double = $(eps)

  /**
    * Optional factor the gradient is multiplied by before each update.
    * A common choice is 1.0/batch_size.
    */
  val rescaleGrad: DoubleParam = new DoubleParam(this, "rescale_grad",
    "Multiplies the gradient with this value before updating")
  def getRescaleGrad: Double = $(rescaleGrad)

  /**
    * Non-negative learning rate for the bias term.
    * Suggested value range: [1e-8, 512].
    * Default: 0.1
    */
  val biasLr : DoubleParam = new DoubleParam(this, "bias_lr",
    // Description previously said "Multiplies the gradient with this value before updating",
    // a copy-paste of rescale_grad's text; corrected to match the scaladoc and the sibling
    // linear_lr/factors_lr descriptions.
    "Learning rate for the bias term. Must be >= 0",
    ParamValidators.gtEq(0))
  def getBiasLr: Double = $(biasLr)

  /**
    * Learning rate applied to the linear terms; must not be negative.
    * Suggested value range: [1e-8, 512].
    * Default: 0.001
    */
  val linearLr: DoubleParam = new DoubleParam(this, "linear_lr",
    "Learning rate for linear terms. Must be >= 0", ParamValidators.gtEq(0))
  def getLinearLr: Double = $(linearLr)

  /**
    * Learning rate applied to the factorization terms; must not be negative.
    * Suggested value range: [1e-8, 512].
    * Default: 0.0001
    */
  val factorsLr: DoubleParam = new DoubleParam(this, "factors_lr",
    "Learning rate for factorization terms. Must be >= 0", ParamValidators.gtEq(0))
  def getFactorsLr: Double = $(factorsLr)

  /**
    * Weight decay applied to the bias term; must not be negative.
    * Suggested value range: [1e-8, 512].
    * Default: 0.01
    */
  val biasWd: DoubleParam = new DoubleParam(this, "bias_wd",
    "Weight decay for the bias term. Must be >= 0", ParamValidators.gtEq(0))
  def getBiasWd: Double = $(biasWd)

  /**
    * Weight decay applied to the linear terms; must not be negative.
    * Suggested value range: [1e-8, 512].
    * Default: 0.001
    */
  val linearWd: DoubleParam = new DoubleParam(this, "linear_wd",
    "Weight decay for linear terms. Must be >= 0", ParamValidators.gtEq(0))
  def getLinearWd: Double = $(linearWd)

  /**
    * Weight decay applied to the factorization terms; must not be negative.
    * Suggested value range: [1e-8, 512].
    * Default: 0.00001
    */
  val factorsWd: DoubleParam = new DoubleParam(this, "factors_wd",
    "Weight decay for factorization terms. Must be >= 0", ParamValidators.gtEq(0))
  def getFactorsWd: Double = $(factorsWd)

  /**
    * How the bias term is initialized.
    * Supported options: "normal", "uniform" or "constant".
    *   - uniform: weights sampled uniformly from [-bias_init_scale, +bias_init_scale]
    *   - normal: weights sampled from N(0, bias_init_sigma^2)
    *   - constant: weights set to "bias_init_value"
    * Default: "normal".
    */
  val biasInitMethod: Param[String] = new Param(this, "bias_init_method",
    "Initialization method for the bias supports 'normal', 'uniform' and 'constant'.",
    ParamValidators.inArray(Array("normal", "uniform", "constant")))
  def getBiasInitMethod: String = $(biasInitMethod)

  /**
    * Non-negative half-width of the uniform range used when "bias_init_method" is "uniform".
    * Suggested value range: [1e-8, 512].
    */
  val biasInitScale: DoubleParam = new DoubleParam(this, "bias_init_scale",
    "Range for bias term uniform initialization. Must be >= 0.", ParamValidators.gtEq(0))
  def getBiasInitScale: Double = $(biasInitScale)

  /**
    * Non-negative standard deviation used when "bias_init_method" is "normal".
    * Suggested value range: [1e-8, 512].
    * Default: 0.01.
    */
  val biasInitSigma: DoubleParam = new DoubleParam(this, "bias_init_sigma",
    "Standard deviation for initialization of the bias terms. Must be >= 0.",
    ParamValidators.gtEq(0))
  def getBiasInitSigma: Double = $(biasInitSigma)

  /**
    * Constant the bias term is initialized to when "bias_init_method" is "constant".
    * Suggested value range: [1e-8, 512]
    */
  val biasInitValue: DoubleParam = new DoubleParam(this, "bias_init_value",
    "Initial value for the bias term.")
  def getBiasInitValue: Double = $(biasInitValue)

  /**
    * How the linear terms are initialized.
    * Supported options: "normal", "uniform" or "constant".
    *   - uniform: weights sampled uniformly from [-linear_init_scale, +linear_init_scale]
    *   - normal: weights sampled from N(0, linear_init_sigma^2)
    *   - constant: weights set to "linear_init_value"
    * Default: "normal".
    */
  val linearInitMethod: Param[String] = new Param(this, "linear_init_method",
    "Initialization method for linear term. Supported options: 'normal', 'uniform' and 'constant'.",
    ParamValidators.inArray(Array("normal", "uniform", "constant")))
  def getLinearInitMethod: String = $(linearInitMethod)

  /**
    * Non-negative half-width of the uniform range used when "linear_init_method" is "uniform".
    * Suggested value range: [1e-8, 512].
    */
  val linearInitScale: DoubleParam = new DoubleParam(this, "linear_init_scale",
    "Range for linear term uniform initialization. Must be >= 0.", ParamValidators.gtEq(0))
  def getLinearInitScale: Double = $(linearInitScale)

  /**
    * Non-negative standard deviation used when "linear_init_method" is "normal".
    * Suggested value range: [1e-8, 512].
    * Default: 0.01.
    */
  val linearInitSigma: DoubleParam = new DoubleParam(this, "linear_init_sigma",
    "Standard deviation for initialization of linear terms. Must be >= 0.",
    ParamValidators.gtEq(0))
  def getLinearInitSigma: Double = $(linearInitSigma)

  /**
    * Constant the linear terms are initialized to when "linear_init_method" is "constant".
    * Suggested value range: [1e-8, 512]
    */
  val linearInitValue: DoubleParam = new DoubleParam(this, "linear_init_value",
    "Initial value for linear term.")
  def getLinearInitValue: Double = $(linearInitValue)


  /**
    * How the factorization terms are initialized.
    * Supported options: "normal", "uniform" or "constant".
    *   - uniform: weights sampled uniformly from [-factors_init_scale, +factors_init_scale]
    *   - normal: weights sampled from N(0, factors_init_sigma^2)
    *   - constant: weights set to "factors_init_value"
    * Default: "normal".
    */
  val factorsInitMethod: Param[String] = new Param(this, "factors_init_method",
    "Initialization method for factorization terms supports 'normal', 'uniform' and 'constant'.",
    ParamValidators.inArray(Array("normal", "uniform", "constant")))
  def getFactorsInitMethod: String = $(factorsInitMethod)

  /**
    * Non-negative half-width of the uniform range used when "factors_init_method" is "uniform".
    * Suggested value range: [1e-8, 512].
    */
  val factorsInitScale: DoubleParam = new DoubleParam(this, "factors_init_scale",
    "Range for factorization terms uniform initialization. Must be >= 0.",
    ParamValidators.gtEq(0))
  def getFactorsInitScale: Double = $(factorsInitScale)

  /**
    * Non-negative standard deviation used when "factors_init_method" is "normal".
    * Suggested value range: [1e-8, 512].
    * Default: 0.001.
    */
  val factorsInitSigma: DoubleParam = new DoubleParam(this, "factors_init_sigma",
    "Standard deviation for initialization of factorization terms. Must be >= 0.",
    ParamValidators.gtEq(0))
  def getFactorsInitSigma: Double = $(factorsInitSigma)

  /**
    * Constant the factorization terms are initialized to when "factors_init_method"
    * is "constant".
    * Suggested value range: [1e-8, 512]
    */
  val factorsInitValue: DoubleParam = new DoubleParam(this, "factors_init_value",
    "Initial value for factorization terms.")
  def getFactorsInitValue: Double = $(factorsInitValue)
}

object FactorizationMachinesSageMakerEstimator {