diff --git a/paddlenlp/trainer/auto_trainer.py b/paddlenlp/trainer/auto_trainer.py index ea01b7104e81..33c21bb1fc49 100644 --- a/paddlenlp/trainer/auto_trainer.py +++ b/paddlenlp/trainer/auto_trainer.py @@ -105,7 +105,15 @@ def loss_func(loss, outputs): model = kwargs["model"] for param in model.parameters(): if not param._is_initialized(): - param.initialize() + try: + param.initialize() + except Exception as e: + # NOTE(zhangwl): the param may legitimately be uninitialized here when its init_func is set later; users must call set_init_func before constructing AutoTrainer. + logger.warning( + f"AutoTrainer requires all parameters to be initialized when auto_trainer init, but failed to initialize parameter {param.name} {param}.\n" + + "Please check param init func.\n" + + f"The original exception message is:\n{str(e)}" + ) kwargs["model"] = model super().__init__(*args, **kwargs)