From 7a71b0dd12518983826d17d5768d64eda75f29e0 Mon Sep 17 00:00:00 2001
From: John Smith
Date: Mon, 17 Apr 2023 12:16:21 +0800
Subject: [PATCH] fix bug when loading old lora model

---
 finetune.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/finetune.py b/finetune.py
index b747eb9..9874d1b 100644
--- a/finetune.py
+++ b/finetune.py
@@ -84,7 +84,7 @@ else:
     else:
         device_map = {'': 0}
     print('Device map for lora:', device_map)
-    model = PeftModel.from_pretrained(model, ft_config.lora_apply_dir, device_map=device_map, torch_dtype=torch.float32)
+    model = PeftModel.from_pretrained(model, ft_config.lora_apply_dir, device_map=device_map, torch_dtype=torch.float32, is_trainable=True)
     print(ft_config.lora_apply_dir, 'loaded')
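
Context for the change: peft's PeftModel.from_pretrained loads adapter weights in inference mode by default (is_trainable defaults to False), so the LoRA parameters come back frozen and resuming training on an old adapter silently updates nothing. Passing is_trainable=True keeps the adapter weights trainable. A minimal sketch of the loading pattern, with placeholder paths standing in for the base model and ft_config.lora_apply_dir:

    import torch
    from transformers import AutoModelForCausalLM
    from peft import PeftModel

    # Hypothetical values standing in for the finetune.py config.
    base_model_name = 'path/to/base-model'   # placeholder base model
    lora_apply_dir = './old-lora-checkpoint' # stands in for ft_config.lora_apply_dir

    # Load the base model, then attach the previously saved LoRA adapter.
    model = AutoModelForCausalLM.from_pretrained(base_model_name, torch_dtype=torch.float32)

    # Without is_trainable=True the adapter loads frozen (inference mode),
    # which is the bug this patch fixes for continued fine-tuning.
    model = PeftModel.from_pretrained(
        model,
        lora_apply_dir,
        device_map={'': 0},
        torch_dtype=torch.float32,
        is_trainable=True,
    )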