fix bug when loading old lora model

John Smith 2023-04-17 12:16:21 +08:00
parent 6739f529f5
commit 7a71b0dd12
1 changed file with 1 addition and 1 deletion

@@ -84,7 +84,7 @@ else:
 else:
     device_map = {'': 0}
 print('Device map for lora:', device_map)
-model = PeftModel.from_pretrained(model, ft_config.lora_apply_dir, device_map=device_map, torch_dtype=torch.float32)
+model = PeftModel.from_pretrained(model, ft_config.lora_apply_dir, device_map=device_map, torch_dtype=torch.float32, is_trainable=True)
 print(ft_config.lora_apply_dir, 'loaded')
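
The fix adds is_trainable=True when applying a previously saved LoRA adapter. In the peft library, PeftModel.from_pretrained loads an adapter in inference mode by default, which freezes the LoRA weights, so resuming training from an old adapter silently trains nothing. Below is a minimal sketch of the same pattern; the base model name and adapter directory are placeholders, not paths from this repository.

# Sketch: resume fine-tuning from a saved LoRA adapter with peft.
# "your-base-model" and "./lora_checkpoint" are hypothetical placeholders.
import torch
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "your-base-model",            # placeholder base model id
    torch_dtype=torch.float16,
    device_map={"": 0},
)

# is_trainable=True keeps the LoRA weights unfrozen; the default (False)
# loads the adapter for inference only, which breaks continued training.
model = PeftModel.from_pretrained(
    base,
    "./lora_checkpoint",          # placeholder adapter directory
    is_trainable=True,
)
model.print_trainable_parameters()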