Skip to content

Commit

Permalink
fix: guard missing LoRA adapter attribute after API change (#1630)
Browse files Browse the repository at this point in the history
  • Loading branch information
shamitv authored Jul 31, 2024
1 parent 8a12c9f commit 1f0b9a2
Showing 1 changed file with 6 additions and 3 deletions.
9 changes: 6 additions & 3 deletions llama_cpp/llama.py
Original file line number Diff line number Diff line change
Expand Up @@ -2083,11 +2083,14 @@ def pooling_type(self) -> str:

def close(self) -> None:
    """Explicitly free the model from memory.

    Safe to call on a partially-constructed instance: if ``__init__``
    raised before ``_stack`` was assigned (or it was set to ``None``),
    this is a no-op instead of raising ``AttributeError``.
    """
    # getattr with a None default collapses the hasattr + is-not-None
    # double check into a single guard.
    stack = getattr(self, "_stack", None)
    if stack is not None:
        stack.close()

def __del__(self) -> None:
    """Finalizer: free the native LoRA adapter, then release resources.

    Guards attribute access because ``__del__`` can run on a
    partially-constructed instance whose ``__init__`` raised before
    ``_lora_adapter`` was ever assigned.
    """
    # Free the C-side adapter first so it never outlives the model.
    adapter = getattr(self, "_lora_adapter", None)
    if adapter is not None:
        llama_cpp.llama_lora_adapter_free(adapter)
    self.close()

@staticmethod
Expand Down

0 comments on commit 1f0b9a2

Please sign in to comment.