def _neuron_config_init()

in optimum/neuron/modeling_traced.py [0:0]


    def _neuron_config_init(cls, config: "PretrainedConfig") -> "NeuronDefaultConfig":
        """
        Builds a `NeuronDefaultConfig` with an instance of the `PretrainedConfig` and the task.

        Args:
            config (`PretrainedConfig`):
                Model configuration; must carry a `neuron` attribute holding the
                compilation metadata saved at export time (compiler info, static
                shapes, task, etc.).

        Returns:
            `NeuronDefaultConfig`: the reconstructed neuron export configuration.

        Raises:
            ValueError: If `config` has no `neuron` attribute.
        """
        if not hasattr(config, "neuron"):
            raise ValueError(
                "Unable to identify neuron configuration with the keyword `neuron`, make sure that your config file contains necessary information"
            )

        neuron_config = config.neuron
        # Fetch compiler information recorded at export time.
        compiler_type = neuron_config.get("compiler_type")
        compiler_version = neuron_config.get("compiler_version")
        tensor_parallel_size = neuron_config.get("tensor_parallel_size", 1)

        # Fetch the mandatory static shapes from the config, stripping the
        # `static_` prefix. Use `.get("neuron", {})` so a diff dict that omits
        # the key (diff dicts drop default-valued entries) yields an empty
        # mapping instead of raising `AttributeError` on `None`.
        compile_shapes = {
            key.replace("static_", ""): value
            for (key, value) in config.to_diff_dict().get("neuron", {}).items()
            if key.startswith("static_")
        }

        # Resolve the task and model type, falling back to inference from the
        # auto model class / base config when not stored in the neuron metadata.
        task = neuron_config.get("task", None) or TasksManager.infer_task_from_model(cls.auto_model_class)
        task = TasksManager.map_from_synonym(task)
        model_type = neuron_config.get("model_type", None) or config.model_type
        model_type = model_type.replace("_", "-")  # exporter registry keys use dashes
        neuron_config_constructor = TasksManager.get_exporter_config_constructor(
            model_type=model_type,
            exporter="neuron",
            task=task,
            library_name=cls.library_name,
        )

        compile_shapes = InputShapesArguments(**compile_shapes)
        return neuron_config_constructor(
            config,
            dynamic_batch_size=neuron_config.get("dynamic_batch_size", False),
            compiler_type=compiler_type,
            compiler_version=compiler_version,
            tensor_parallel_size=tensor_parallel_size,
            input_shapes=compile_shapes,
            output_hidden_states=neuron_config.get("output_hidden_states", False),
        )