nni/algorithms/compression/pytorch/pruning/auto_compress_pruner.py [118:147]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        self._experiment_data_dir = experiment_data_dir
        if not os.path.exists(self._experiment_data_dir):
            os.makedirs(self._experiment_data_dir)

    def validate_config(self, model, config_list):
        """
        Parameters
        ----------
        model : torch.nn.Module
            Model to be pruned
        config_list : list
            List of pruning configs
        """

        if self._base_algo == 'level':
            schema = PrunerSchema([{
                Optional('sparsity'): And(float, lambda n: 0 < n < 1),
                Optional('op_types'): [str],
                Optional('op_names'): [str],
                Optional('exclude'): bool
            }], model, _logger)
        elif self._base_algo in ['l1', 'l2', 'fpgm']:
            schema = PrunerSchema([{
                Optional('sparsity'): And(float, lambda n: 0 < n < 1),
                'op_types': ['Conv2d'],
                Optional('op_names'): [str],
                Optional('exclude'): bool
            }], model, _logger)

        schema.validate(config_list)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
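
For reference, a minimal sketch of a config_list that should pass the 'l1'/'l2'/'fpgm' branch of the schema above. The model class and the concrete values are illustrative assumptions, not taken from the NNI source:

import torch

class TinyConvNet(torch.nn.Module):
    """Hypothetical model with a single Conv2d layer, used only to illustrate the schema."""
    def __init__(self):
        super().__init__()
        self.conv = torch.nn.Conv2d(3, 8, kernel_size=3)

    def forward(self, x):
        return self.conv(x)

model = TinyConvNet()

# For base_algo in ('l1', 'l2', 'fpgm') the schema makes 'op_types' mandatory and
# only accepts 'Conv2d' entries; 'sparsity', if given, must lie strictly in (0, 1).
config_list = [{
    'sparsity': 0.5,
    'op_types': ['Conv2d'],
}]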



nni/algorithms/compression/pytorch/pruning/simulated_annealing_pruner.py [103:132]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        self._experiment_data_dir = experiment_data_dir
        if not os.path.exists(self._experiment_data_dir):
            os.makedirs(self._experiment_data_dir)

    def validate_config(self, model, config_list):
        """
        Parameters
        ----------
        model : torch.nn.Module
            Model to be pruned
        config_list : list
            List of pruning configs
        """

        if self._base_algo == 'level':
            schema = PrunerSchema([{
                Optional('sparsity'): And(float, lambda n: 0 < n < 1),
                Optional('op_types'): [str],
                Optional('op_names'): [str],
                Optional('exclude'): bool
            }], model, _logger)
        elif self._base_algo in ['l1', 'l2', 'fpgm']:
            schema = PrunerSchema([{
                Optional('sparsity'): And(float, lambda n: 0 < n < 1),
                'op_types': ['Conv2d'],
                Optional('op_names'): [str],
                Optional('exclude'): bool
            }], model, _logger)

        schema.validate(config_list)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
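
Similarly, a sketch of a config_list for the 'level' branch, where every key is optional and any op type is accepted. The values are illustrative assumptions only:

# With base_algo == 'level' no key is mandatory; 'op_types' may name any module type,
# and 'sparsity', if present, must still lie strictly between 0 and 1.
level_config_list = [{
    'sparsity': 0.3,
    'op_types': ['Linear'],
}]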



