causalml/inference/meta/rlearner.py [87:125]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        )

    def fit(self, X, treatment, y, p=None, sample_weight=None, verbose=True):
        """Fit the treatment effect and outcome models of the R learner.

        Args:
            X (np.matrix or np.array or pd.DataFrame): a feature matrix
            treatment (np.array or pd.Series): a treatment vector
            y (np.array or pd.Series): an outcome vector
            p (np.ndarray or pd.Series or dict, optional): an array of propensity scores of float (0, 1) in the
                single-treatment case, or a dictionary mapping treatment groups to propensity vectors of
                float (0, 1); if None, ElasticNetPropensityModel() is run to generate the propensity scores.
            sample_weight (np.array or pd.Series, optional): an array of sample weights indicating the
                weight of each observation for `effect_learner`. If None, it assumes equal weight.
            verbose (bool, optional): whether to output progress logs
        """
        X, treatment, y = convert_pd_to_np(X, treatment, y)
        check_treatment_vector(treatment, self.control_name)
        if sample_weight is not None:
            assert len(sample_weight) == len(
                y
            ), "Data length must be equal for sample_weight and the input data"
            sample_weight = convert_pd_to_np(sample_weight)
        self.t_groups = np.unique(treatment[treatment != self.control_name])
        self.t_groups.sort()

        if p is None:
            self._set_propensity_models(X=X, treatment=treatment, y=y)
            p = self.propensity
        else:
            p = self._format_p(p, self.t_groups)

        self._classes = {group: i for i, group in enumerate(self.t_groups)}
        self.models_tau = {group: deepcopy(self.model_tau) for group in self.t_groups}
        self.vars_c = {}
        self.vars_t = {}

        if verbose:
            logger.info("generating out-of-fold CV outcome estimates")
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
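
A minimal usage sketch of this `fit()` API, for context only. It assumes `BaseRRegressor` is the concrete learner wrapping this method and that it accepts a scikit-learn-style regressor as its base learner; the synthetic data and group names are illustrative, not from the source.

# Usage sketch (not part of rlearner.py): fitting an R-learner through this fit().
import numpy as np
from sklearn.ensemble import GradientBoostingRegressor
from causalml.inference.meta import BaseRRegressor

rng = np.random.default_rng(0)
X = rng.normal(size=(1000, 5))                                 # feature matrix
treatment = rng.choice(["control", "treatment_a"], size=1000)  # treatment vector
y = X[:, 0] + 0.5 * (treatment == "treatment_a") + rng.normal(size=1000)

learner = BaseRRegressor(learner=GradientBoostingRegressor(), control_name="control")

# p=None, so propensity scores are estimated internally with
# ElasticNetPropensityModel(); sample_weight only affects the effect_learner stage.
learner.fit(X, treatment, y, sample_weight=np.ones(len(y)))
cate = learner.predict(X)  # per-unit treatment effect estimates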



causalml/inference/meta/rlearner.py [428:466]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            )

    def fit(self, X, treatment, y, p=None, sample_weight=None, verbose=True):
        """Fit the treatment effect and outcome models of the R learner.

        Args:
            X (np.matrix or np.array or pd.DataFrame): a feature matrix
            treatment (np.array or pd.Series): a treatment vector
            y (np.array or pd.Series): an outcome vector
            p (np.ndarray or pd.Series or dict, optional): an array of propensity scores of float (0, 1) in the
                single-treatment case, or a dictionary mapping treatment groups to propensity vectors of
                float (0, 1); if None, ElasticNetPropensityModel() is run to generate the propensity scores.
            sample_weight (np.array or pd.Series, optional): an array of sample weights indicating the
                weight of each observation for `effect_learner`. If None, it assumes equal weight.
            verbose (bool, optional): whether to output progress logs
        """
        X, treatment, y = convert_pd_to_np(X, treatment, y)
        check_treatment_vector(treatment, self.control_name)
        if sample_weight is not None:
            assert len(sample_weight) == len(
                y
            ), "Data length must be equal for sample_weight and the input data"
            sample_weight = convert_pd_to_np(sample_weight)
        self.t_groups = np.unique(treatment[treatment != self.control_name])
        self.t_groups.sort()

        if p is None:
            self._set_propensity_models(X=X, treatment=treatment, y=y)
            p = self.propensity
        else:
            p = self._format_p(p, self.t_groups)

        self._classes = {group: i for i, group in enumerate(self.t_groups)}
        self.models_tau = {group: deepcopy(self.model_tau) for group in self.t_groups}
        self.vars_c = {}
        self.vars_t = {}

        if verbose:
            logger.info("generating out-of-fold CV outcome estimates")
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
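
The second copy of `fit()` takes the same `p` argument. Below is a sketch of supplying precomputed propensity scores instead of relying on the internal ElasticNetPropensityModel(); the classifier used to build the scores is an arbitrary choice, and the dict keys are assumed to match the non-control group names in `treatment`.

# Sketch of passing precomputed propensity scores to fit() (illustrative only).
# In the single-treatment case p can be a flat array in (0, 1); with multiple
# treatment groups it must be a dict keyed by the group names in `treatment`.
import numpy as np
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.linear_model import LogisticRegression
from causalml.inference.meta import BaseRRegressor

rng = np.random.default_rng(1)
X = rng.normal(size=(500, 5))
treatment = rng.choice(["control", "treatment_a"], size=500)
y = X[:, 0] + (treatment == "treatment_a") + rng.normal(size=500)

# Estimate P(treatment | X) with any classifier and clip the output so the
# scores stay strictly inside (0, 1), as the docstring requires.
prop_model = LogisticRegression().fit(X, treatment == "treatment_a")
p = np.clip(prop_model.predict_proba(X)[:, 1], 0.01, 0.99)

learner = BaseRRegressor(learner=GradientBoostingRegressor(), control_name="control")
learner.fit(X, treatment, y, p=p)                   # flat array form
learner.fit(X, treatment, y, p={"treatment_a": p})  # equivalent dict form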



