commit
22a97d588e
3 changed files with 170 additions and 0 deletions
@@ -0,0 +1,5 @@
# Exclude .venv directory
.venv/
# Exclude files in pages folder
pages/*
.DS_Store
@@ -0,0 +1,118 @@
from pydantic import BaseModel, Field
import subprocess

import streamlit as st

import ollama
from ollama import ChatResponse

MODEL = '<model>'  # Type "ollama list" in terminal to see available models


class PythonScript(BaseModel):
    script: str = Field(description="The Python script to be executed.")
    comments: str = Field(description="Any additional notes or comments about the script.")
    external_libraries: list[str] = Field(description="List of external libraries used in the script.")
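

# Example (sketch only, values are hypothetical) of a model response conforming to this schema:
#   {"script": "import streamlit as st\n...",
#    "comments": "Loads a CSV the user uploads and shows a preview.",
#    "external_libraries": ["pandas"]}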


class LLM:
    def __init__(self, system_message: str, model: str = MODEL):
        self.system_message = system_message
        self.messages = [
            {"role": "system", "content": system_message}
        ]
        self.model = model

    def generate(self, message: str, format: dict) -> ChatResponse:
        """
        Generate a chat response using the Ollama model.

        This method appends the user message to the conversation history, sends it to the
        Ollama chat model, and appends the assistant's response back to the history.

        Args:
            message (str): The user's input message to send to the chat model.
            format (dict): The JSON schema the response must conform to (e.g. the dict
                returned by a Pydantic model's model_json_schema()).

        Returns:
            ChatResponse: The complete response object from the Ollama chat model,
                containing the assistant's reply and metadata.

        Note:
            The method uses a low temperature (0.1) for more deterministic responses
            and automatically maintains the conversation history in self.messages.
        """
        self.messages.append({"role": "user", "content": message})

        response: ChatResponse = ollama.chat(
            model=self.model,
            messages=self.messages,
            format=format,
            options={'temperature': 0.1}
        )
        self.messages.append({"role": "assistant", "content": response.message.content})
        return response
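

# Example (sketch, not executed): how the class above is used with a structured format.
# "llama3.2" is a placeholder model name; use one listed by "ollama list".
#
#   llm = LLM(system_message="You are a helpful assistant.", model="llama3.2")
#   reply = llm.generate("Write a hello-world script.", format=PythonScript.model_json_schema())
#   result = PythonScript.model_validate_json(reply.message.content)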


# Initialize chat history in session state
if 'chat_history' not in st.session_state:
    st.session_state.chat_history = []
if 'current_code' not in st.session_state:
    st.session_state.current_code = None
if 'current_comments' not in st.session_state:
    st.session_state.current_comments = None
if 'current_libraries' not in st.session_state:
    st.session_state.current_libraries = []

st.title("Python Script Generator Chat")

# Display chat history
for role, message in st.session_state.chat_history:
    if role == 'user':
        st.chat_message("user").write(message)
    else:
        st.chat_message("assistant").write(message)

user_input = st.chat_input("Describe what you want help doing or give feedback on the script")

if user_input:
    st.session_state.chat_history.append(('user', user_input))
    st.chat_message("user").write(user_input)

    # Build system message and conversation context
    system_message = """
    You are a Python coder. Your task is to construct a Python script based on the user's input and feedback.
    The script should take the form of a Streamlit app, so make use of the Streamlit library for user interface elements.
    Example: If the user wants to upload a file, use st.file_uploader. If the user wants to input text, use st.text_input. Etc.
    **IMPORTANT! The user cannot change anything in the script, only interact with it via Streamlit UI elements.**
    Therefore, don't use placeholders like "your_file" or "your_text". Instead, use Streamlit UI elements to get the input from the user.
    If the user gives feedback, improve the previous script accordingly.
    """
    # (A commented sketch of the widget-driven page this prompt asks for appears at the end of this file.)

    # Build conversation context
    conversation = "\n".join([
        f"User: {msg}" if role == 'user' else f"Assistant: {msg}"
        for role, msg in st.session_state.chat_history
    ])

    llm = LLM(system_message=system_message)
    response = llm.generate(conversation, format=PythonScript.model_json_schema())
    answer = PythonScript.model_validate_json(response.message.content)

    code = answer.script
    comments = answer.comments
    libraries = answer.external_libraries

    st.session_state.current_code = code
    st.session_state.current_comments = comments
    st.session_state.current_libraries = libraries

    st.session_state.chat_history.append(('assistant', comments + "\n\n" + code))
    st.chat_message("assistant").write(comments)
    st.code(code, language='python')

    # Install any external libraries the generated script needs but that are missing locally
    for library in libraries:
        try:
            __import__(library)
        except ImportError:
            st.write(f"Installing {library}...")
            subprocess.check_call(["pip", "install", library])

    # Save the generated script as a page so Streamlit serves it alongside this app
    with open("pages/user_app.py", "w") as f:
        f.write(code)

elif st.session_state.current_code:
    st.chat_message("assistant").write(st.session_state.current_comments)
    with st.expander("View the code"):
        st.code(st.session_state.current_code, language='python')
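

# Sketch (illustration only, not used by the app): the kind of widget-driven page the
# system prompt asks the model to produce — all input comes from Streamlit UI elements
# rather than placeholders the user would have to edit by hand.
#
#   uploaded = st.file_uploader("Upload your data file")
#   query = st.text_input("What should the script do with it?")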
@@ -0,0 +1,47 @@
altair==5.5.0
annotated-types==0.7.0
anyio==4.9.0
attrs==25.3.0
blinker==1.9.0
cachetools==5.5.2
certifi==2025.4.26
charset-normalizer==3.4.2
click==8.2.1
exceptiongroup==1.3.0
gitdb==4.0.12
GitPython==3.1.44
h11==0.16.0
httpcore==1.0.9
httpx==0.28.1
idna==3.10
Jinja2==3.1.6
jsonschema==4.24.0
jsonschema-specifications==2025.4.1
MarkupSafe==3.0.2
narwhals==1.41.1
numpy==2.2.6
ollama==0.5.1
packaging==24.2
pandas==2.3.0
pillow==11.2.1
protobuf==6.31.1
pyarrow==20.0.0
pydantic==2.11.5
pydantic_core==2.33.2
pydeck==0.9.1
python-dateutil==2.9.0.post0
pytz==2025.2
referencing==0.36.2
requests==2.32.3
rpds-py==0.25.1
six==1.17.0
smmap==5.0.2
sniffio==1.3.1
streamlit==1.45.1
tenacity==9.1.2
toml==0.10.2
tornado==6.5.1
typing-inspection==0.4.1
typing_extensions==4.14.0
tzdata==2025.2
urllib3==2.4.0