Merge pull request #2267 from modelscope/dev_sx2

fix model_wrapper and seaco finetune
This commit is contained in:
Shi Xian 2024-12-05 15:15:38 +08:00 committed by GitHub
commit 0efc87352c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 2 additions and 1 deletion

View File

@ -78,5 +78,6 @@ torchrun $DISTRIBUTED_ARGS \
++train_conf.avg_nbest_model=10 \
++train_conf.use_deepspeed=false \
++train_conf.deepspeed_config=${deepspeed_config} \
++train_conf.find_unused_parameters=true \
++optim_conf.lr=0.0002 \
++output_dir="${output_dir}" &> ${log_file}

View File

@ -134,7 +134,7 @@ def main(**kwargs):
**kwargs.get("train_conf"),
)
model = trainer.warp_model(model)
model = trainer.warp_model(model, **kwargs)
kwargs["device"] = int(os.environ.get("LOCAL_RANK", 0))
trainer.device = int(os.environ.get("LOCAL_RANK", 0))