Mirror of https://github.com/abetlen/llama-cpp-python.git, synced 2023-09-07 17:34:22 +03:00.
Disable mmap when applying lora weights. Closes #107
This commit is contained in:
@@ -79,7 +79,7 @@ class Llama:
         self.params.f16_kv = f16_kv
         self.params.logits_all = logits_all
         self.params.vocab_only = vocab_only
-        self.params.use_mmap = use_mmap
+        self.params.use_mmap = use_mmap if lora_path is None else False
         self.params.use_mlock = use_mlock
         self.params.embedding = embedding
Reference in New Issue
Block a user