in optimum/neuron/trainers.py [0:0]
def __init__(self, *args, **kwargs):
    if not isinstance(self, Trainer):
        raise TypeError(f"{self.__class__.__name__} can only be mixed with Trainer subclasses.")

    # `Trainer.__init__` takes the model first and the training arguments second, so the
    # training arguments can arrive either as the `args` keyword or as the second
    # positional argument.
    training_args = kwargs.get("args", None)
    if training_args is None and len(args) >= 2:
        training_args = args[1]

    self.use_amp = False
    if (
        training_args is not None
        and training_args.bf16
        and training_args.half_precision_backend == "amp"
    ):
        self.use_amp = True

    if is_precompilation():
        self.prepare_for_precompilation(training_args)

    super().__init__(*args, **kwargs)
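    # `self.args` is only set by `Trainer.__init__`, so the Neuron-specific type check
    # below has to run after the call to `super().__init__`.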
    if not isinstance(self.args, NeuronTrainingArguments):
        raise ValueError(
            f"The NeuronTrainer only accepts NeuronTrainingArguments, but {type(self.args)} was provided."
        )

    # We need to change which process is considered the "world process zero" to make sure the
    # proper metrics (e.g. the loss) are logged and sent to the callbacks (for instance
    # WandbCallback).
    self.state = TrainerState(
        is_local_process_zero=self.is_local_process_zero(),
        is_world_process_zero=is_main_worker_for_metrics(),
    )

    # `local_rank` is -1 in non-distributed runs and 0 for the local main process.
    if self.args.local_rank <= 0:
        logger.setLevel(logging.INFO)

    # Make the model Neuron-compatible for generation.
    patch_generation_mixin_to_neuron_generation_mixin(self.model)
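
The `isinstance(self, Trainer)` guard at the top means this `__init__` is mixin code: the concrete trainer class must list the mixin before `Trainer` so that this method runs first and then delegates along the MRO via `super().__init__`. A minimal sketch of that composition, assuming a mixin name of `NeuronTrainerMixin` (the actual class name is not shown in this excerpt; only `Trainer` and `NeuronTrainingArguments` appear in it):

from transformers import Trainer

class NeuronTrainerMixin:  # hypothetical name for the class defining __init__ above
    def __init__(self, *args, **kwargs):
        if not isinstance(self, Trainer):
            raise TypeError(f"{self.__class__.__name__} can only be mixed with Trainer subclasses.")
        super().__init__(*args, **kwargs)  # continues along the MRO into Trainer.__init__

# Listing the mixin first puts it ahead of Trainer in the MRO, so the guard passes
# and the mixin's __init__ runs before Trainer's.
class NeuronTrainer(NeuronTrainerMixin, Trainer):
    pass

Instantiating the composed class then looks like instantiating a regular Trainer, e.g. `NeuronTrainer(model=model, args=neuron_training_args)`.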