From ba75c7dc978625eaa02da048511c59444740b01b Mon Sep 17 00:00:00 2001
From: lasseedfast
Date: Thu, 18 Sep 2025 15:39:12 +0200
Subject: [PATCH] Returning the whole ChatResponse object, so need to update
 other code...

---
 _llm/llm.py | 20 +++++++-------------
 1 file changed, 7 insertions(+), 13 deletions(-)

diff --git a/_llm/llm.py b/_llm/llm.py
index 93999e8..f5d1fa0 100644
--- a/_llm/llm.py
+++ b/_llm/llm.py
@@ -106,8 +106,8 @@ class LLM:
         headers["X-Chosen-Backend"] = self.chosen_backend
 
         # If connected over VPN
-        self.on_vpn = True
-        if on_vpn:
+        self.on_vpn = on_vpn
+        if self.on_vpn:
             self.host_url = f"{os.getenv('LLM_URL')}:{os.getenv('LLM_PORT')}"
         else:
             self.host_url = os.getenv("LLM_API_URL").rstrip("/api/chat/")
@@ -151,7 +151,6 @@ class LLM:
         if model == "embeddings":
             self.messages = [{"role": "user", "content": query}]
             model = self.get_model("embeddings")
-            print_red(f"Using embeddings model: {model}")
             return model
 
         if messages:
@@ -525,7 +524,7 @@ class LLM:
         model = self._prepare_messages_and_model(
             query, user_input, context, messages, images, model
         )
-        print_red(model)
+
         temperature = temperature if temperature else self.options["temperature"]
         if think is None:
             think = self.think
@@ -533,7 +532,9 @@ class LLM:
         try:
             headers = self._build_headers(model)
             options = self._get_options(temperature)
-            response = self._call_remote_api(
+
+            # Call Ollama server
+            response: ChatResponse = self._call_remote_api(
                 model, tools, stream, options, format, headers, think=think
             )
 
@@ -555,18 +556,11 @@ class LLM:
             if not self.chat:
                 self.messages = [self.messages[0]]
 
-            #* Add attribute 'content' for compatibility with old version of _llm.py
-            #* This is a copy of response.message.content
-            #TODO Remove this when _llm.py is updated
-            # Show a warning if using old version of _llm.py
-            Warning = ("Please use reposen.message.content when ising _llm")
-
-            response.content = response.message
 
             return response
         else:
             return "An error occurred."
-            except Exception as e:
+        except Exception as e:
             traceback.print_exc()
 
             if self.local_available:
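
Note for callers (not part of the diff above): since the compatibility shim is
gone, code that read `response.content` must switch to
`response.message.content`. A minimal caller-side sketch, assuming the
`ollama` package's `ChatResponse` type (matching the annotation added in the
patch); the helper name `extract_text` is illustrative, not from this repo:

    from ollama import ChatResponse  # assumed source of the ChatResponse type

    def extract_text(response: "ChatResponse | str") -> str:
        # The method still returns the string "An error occurred." on
        # failure, and now the whole ChatResponse object on success.
        if isinstance(response, str):
            return response
        # The removed shim copied the message onto `response.content`;
        # read the text from the nested message instead. `content` may be
        # None (e.g. for pure tool-call responses), so fall back to "".
        return response.message.content or ""

Downstream code that still touches `response.content` will now raise an
AttributeError, which is the follow-up work the subject line points at.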