fix bug when loading old lora model
parent 6739f529f5
commit 7a71b0dd12
@@ -84,7 +84,7 @@ else:
 else:
     device_map = {'': 0}
 print('Device map for lora:', device_map)
-model = PeftModel.from_pretrained(model, ft_config.lora_apply_dir, device_map=device_map, torch_dtype=torch.float32)
+model = PeftModel.from_pretrained(model, ft_config.lora_apply_dir, device_map=device_map, torch_dtype=torch.float32, is_trainable=True)
 print(ft_config.lora_apply_dir, 'loaded')
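Context for the fix: PEFT's PeftModel.from_pretrained loads an adapter in inference mode by default (is_trainable=False), which freezes the LoRA weights. Reloading an old LoRA checkpoint to continue fine-tuning would therefore silently train nothing. Below is a minimal sketch of the fixed loading path; the base model name and the adapter directory 'old_lora_checkpoint/' are placeholders for illustration, not names from this repository.

import torch
from transformers import AutoModelForCausalLM
from peft import PeftModel

# Placeholder base model; the repo resolves this from its own config.
model = AutoModelForCausalLM.from_pretrained(
    'huggyllama/llama-7b',
    torch_dtype=torch.float32,
    device_map={'': 0},  # put everything on GPU 0, matching the commit's device_map
)

# Without is_trainable=True, PEFT marks the LoRA weights with
# requires_grad=False, so resumed training would update nothing.
model = PeftModel.from_pretrained(
    model,
    'old_lora_checkpoint/',  # placeholder for ft_config.lora_apply_dir
    device_map={'': 0},
    torch_dtype=torch.float32,
    is_trainable=True,  # keep adapter weights trainable for continued fine-tuning
)

# Sanity check: at least some parameters should now require gradients.
assert any(p.requires_grad for p in model.parameters())

Since is_trainable defaults to False, the one-argument change in the diff is enough to make old LoRA checkpoints usable as a starting point for further training instead of inference only.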
||||||