Now single_pdf example is working
parent 6c9aececd5
commit b6bce5eb8a
.env (2 lines changed)
@@ -1,2 +1,2 @@
 OPENAI_API_KEY='' # Optional
-LLM_MODEL='' # Optional, either an Ollama or Open AI model name
+LLM_MODEL='llama3.2:3b-instruct-q5_K_M' # Optional, either an Ollama or Open AI model name
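For context, a minimal sketch of how these two settings could be read at runtime. It assumes the python-dotenv package and plain os.getenv; the variable names come from the .env above, but the loading and routing logic is illustrative only, not the project's actual code.

# Illustrative only: reading the .env values shown in this diff.
import os
from dotenv import load_dotenv  # assumes python-dotenv is installed

load_dotenv()  # read .env from the current working directory

openai_api_key = os.getenv("OPENAI_API_KEY", "")  # optional
llm_model = os.getenv("LLM_MODEL", "")            # Ollama or OpenAI model name

# Assumed routing rule, for illustration only: prefer OpenAI when a key is set,
# otherwise treat the model name as a local Ollama model.
backend = "openai" if openai_api_key else "ollama"
print(f"Model: {llm_model or '(unset)'} via {backend}")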
__init__.py (new file, 0 lines)
@@ -320,7 +320,6 @@ class Highlighter:
        return pdf_buffer

    async def get_sentences_with_llm(self, text, user_input):
        print(text)
        prompt = GET_SENTENCES_PROMPT.format(text=text, user_input=user_input)

        answer = await self.llm.generate(prompt)
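Since get_sentences_with_llm is a coroutine, callers need an event loop. The sketch below is one hedged way to drive it from a script; the constructor arguments mirror the example elsewhere in this commit, while the method's return value and the import path are assumptions not confirmed by the diff.

# Illustrative only: driving the coroutine above from synchronous code.
import asyncio

async def demo(highlighter):
    # Text would normally come from the PDF extraction step; hard-coded here.
    text = "Example text extracted from a PDF page."
    user_input = "sentences about pricing"
    # Return value is assumed to be the LLM's answer; the diff does not show it.
    answer = await highlighter.get_sentences_with_llm(text, user_input)
    print(answer)

# Construction mirrors the example in this commit (import path not shown in the diff):
# asyncio.run(demo(Highlighter(model='llama3.1', comment=True)))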
highlighted_example_pdf_document.pdf (new binary file, content not shown)
@@ -10,7 +10,6 @@ pages = [1, 2]

# Initialize the Highlighter
highlighter = Highlighter(
    model='llama3.1',
    comment=True # Enable comments to understand the context
)
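For completeness, a hedged sketch of how this configured Highlighter and the pages list from the hunk header might be put to work; the highlight call, its argument names, and the input filename are assumptions for illustration, not an API confirmed by this diff.

# Illustrative only: everything below except the constructor is hypothetical.
pages = [1, 2]

highlighter = Highlighter(
    model='llama3.1',
    comment=True  # Enable comments to understand the context
)

# Hypothetical call, shown only to indicate the intended flow; the commit's
# highlighted_example_pdf_document.pdf suggests output of this shape.
# highlighted_pdf = highlighter.highlight(
#     "example_pdf_document.pdf",  # input filename assumed
#     user_input="key findings",
#     pages=pages,
# )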