Now the single_pdf example is working

lasseedfast 2024-10-16 14:45:05 +02:00
parent 6c9aececd5
commit b6bce5eb8a
6 changed files with 1 addition and 3 deletions

.env

@@ -1,2 +1,2 @@
 OPENAI_API_KEY='' # Optional
-LLM_MODEL='' # Optional, either an Ollama or Open AI model name
+LLM_MODEL='llama3.2:3b-instruct-q5_K_M' # Optional, either an Ollama or Open AI model name
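
The example script now relies on this default instead of passing a model name explicitly (see the last hunk below). As a rough sketch of how such a setting is typically picked up, assuming python-dotenv is in use; the pick_model helper is hypothetical and not part of this repository:

    import os
    from dotenv import load_dotenv

    load_dotenv()  # make OPENAI_API_KEY and LLM_MODEL from .env visible to os.getenv

    def pick_model():
        # Hypothetical helper: use the configured model name, falling back to the
        # value this commit writes into .env if the variable is left empty.
        return os.getenv("LLM_MODEL") or "llama3.2:3b-instruct-q5_K_M"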

__init__.py (new, empty file)


@@ -320,7 +320,6 @@ class Highlighter:
         return pdf_buffer
 
     async def get_sentences_with_llm(self, text, user_input):
-        print(text)
         prompt = GET_SENTENCES_PROMPT.format(text=text, user_input=user_input)
         answer = await self.llm.generate(prompt)
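
Removing the debug print leaves get_sentences_with_llm doing only the prompt formatting and the awaited LLM call. Because it is a coroutine, a caller needs an event loop; below is a rough usage sketch with placeholder inputs, reusing the highlighter instance from the example further down (whether the method is meant to be called directly from user code is not confirmed by this diff):

    import asyncio

    async def main():
        # Placeholder inputs; in the single_pdf example the text would come from a PDF page.
        result = await highlighter.get_sentences_with_llm(
            text="First sentence. Second sentence about the topic.",
            user_input="Which sentences discuss the topic?",
        )
        print(result)  # the method name suggests this holds the sentences to highlight

    asyncio.run(main())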

Binary file not shown.


@@ -10,7 +10,6 @@ pages = [1, 2]
 # Initialize the Highlighter
 highlighter = Highlighter(
-    model='llama3.1',
     comment=True # Enable comments to understand the context
 )
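
With the hard-coded model gone, the example presumably falls back to the LLM_MODEL value set in .env above. Assuming the constructor still accepts an explicit model argument (as the removed line suggests), a per-script override would look roughly like this:

    # Explicit override, close to what the example did before this commit.
    highlighter = Highlighter(
        model='llama3.2:3b-instruct-q5_K_M',  # any Ollama or OpenAI model name
        comment=True  # Enable comments to understand the context
    )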