From 25fd04085b039605702d45c12a3c31f0f8f4cdc6 Mon Sep 17 00:00:00 2001
From: Cebtenzzre
Date: Tue, 6 Jun 2023 20:08:31 -0400
Subject: [PATCH] llama: fix exception in Llama.__del__

---
 llama_cpp/llama.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index e53c9c8ae..a3b268b10 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -1637,12 +1637,14 @@ def create_chat_completion(
         )
         return self._convert_completion_to_chat(completion_or_chunks, stream=stream)  # type: ignore
 
-    def _free_model(self):
-        if hasattr(self, "model") and self.model is not None:
-            llama_cpp.llama_free_model(self.model)
+    def _free_model(self, *, _lfree_model=llama_cpp._lib.llama_free_model, _free=llama_cpp._lib.llama_free):
+        model = getattr(self, 'model', None)
+        if model is not None:
+            _lfree_model(model)
             self.model = None
-        if hasattr(self, "ctx") and self.ctx is not None:
-            llama_cpp.llama_free(self.ctx)
+        ctx = getattr(self, 'ctx', None)
+        if ctx is not None:
+            _free(ctx)
             self.ctx = None
 
     def __del__(self):
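
Why the patch works: during CPython interpreter shutdown, a module's globals
can be cleared before lingering objects are finalized, so the old __del__
path could reach into a half-torn-down llama_cpp module and raise (surfacing
as an "Exception ignored in: ..." message at exit). Keyword-only default
arguments are evaluated once, at function definition time, so _free_model now
holds direct references to the C free functions and no longer depends on
module globals. Below is a minimal, self-contained sketch of the same failure
mode and fix; the names release, Fragile, and Robust are invented for
illustration and are not part of llama-cpp-python. Instead of waiting for
interpreter shutdown, it simulates the teardown by clearing the module global
by hand.

    def release(handle):
        # Stand-in for a C cleanup routine such as llama_cpp._lib.llama_free.
        print(f"released {handle}")

    class Fragile:
        def __del__(self):
            release("fragile")  # looks up the module global at finalization time

    class Robust:
        # The default is bound once, when the class body executes, so it
        # keeps working even after the module global is cleared.
        def __del__(self, *, _release=release):
            _release("robust")

    f, r = Fragile(), Robust()
    release = None  # simulate module teardown clearing the global
    del f  # TypeError in __del__ is reported as "Exception ignored in: ..."
    del r  # prints "released robust"; the bound default still holds the function

On the same theme, the getattr(self, ..., None) form is an equivalent,
single-lookup replacement for the hasattr() checks; both guard against
__init__ having failed before the model and ctx attributes were set.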