Refactor _llm.py to handle UnicodeDecodeError when decoding chunks and add timeout to LLM API request

main
lasseedfast 1 year ago
parent 7b557d1ad6
commit 96c4a13aa5
  1 changed file with 4 additions and 0 deletions
      _llm.py

@ -37,7 +37,10 @@ class LLM:
buffer = ""
for chunk in response.iter_content(chunk_size=64):
if chunk:
try:
buffer += chunk.decode('utf-8')
except UnicodeDecodeError:
continue
while "\n" in buffer:
line, buffer = buffer.split("\n", 1)
if line:
@ -74,6 +77,7 @@ class LLM:
json=data,
auth=HTTPBasicAuth(os.getenv('LLM_API_USER'), os.getenv('LLM_API_PWD_LASSE')),
stream=stream,
timeout= 3600,
)
if response.status_code == 404:

Loading…
Cancel
Save