From 4ff4d6722c17b2440a9728c56159c89dd4141460 Mon Sep 17 00:00:00 2001
From: Lasse Studion
Date: Wed, 21 Feb 2024 17:15:47 +0300
Subject: [PATCH] Proof of concept

---
 .gitignore                              |   3 +-
 __pycache__/bot.cpython-310.pyc         | Bin 0 -> 9017 bytes
 __pycache__/bot_strings.cpython-310.pyc | Bin 0 -> 946 bytes
 bot.py                                  | 396 ++++++++++++++++++++++++
 bot_strings.py                          |  12 +
 interface.py                            |   0
 streamlit_interface.py                  | 162 ++++++++++
 7 files changed, 572 insertions(+), 1 deletion(-)
 create mode 100644 __pycache__/bot.cpython-310.pyc
 create mode 100644 __pycache__/bot_strings.cpython-310.pyc
 create mode 100644 bot_strings.py
 delete mode 100644 interface.py
 create mode 100644 streamlit_interface.py

diff --git a/.gitignore b/.gitignore
index fb9df04..9702d2a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
-/.venv
\ No newline at end of file
+/.venv
+.env
\ No newline at end of file
diff --git a/__pycache__/bot.cpython-310.pyc b/__pycache__/bot.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..27dcee8948390e67f45b9e99e0d2ac6c562d8e7d
GIT binary patch
literal 9017
[base85 binary data omitted]

diff --git a/__pycache__/bot_strings.cpython-310.pyc b/__pycache__/bot_strings.cpython-310.pyc
new file mode 100644
GIT binary patch
literal 946
[base85 binary data omitted]
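Note on configuration: the .gitignore change keeps a local .env file out of version control, and bot.py below reads OPEN_AI_KEY from the environment with os.getenv. The recovered part of the patch does not show where .env is actually loaded; a minimal sketch, assuming the python-dotenv package handles that step:

# Sketch only, assuming python-dotenv; the real loading code is not visible in this patch.
import os
from dotenv import load_dotenv

load_dotenv()  # copies KEY=value pairs from a local .env file into the process environment
OPEN_AI_KEY = os.getenv("OPEN_AI_KEY")  # the same variable name the bot classes read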
zzqyS4$gVIR-YI8Z1km(~;K_P3iAf20yQ7C51MHUs4;k^{DW|oioYtCx{k#)w1mT?^ zHGh$sTVGmecoku5J7~sgvtClKQ_ndP`uajQE&@egM2M#XWh3=7D&n+@sjc*%YG&~{ zpQ%r2^lVf}+j2DAwQtp<0g7}2O;QcDOtX!FrBR`4Wi!9FS{rgRwe^NKsu>99b^G8~ zDU9=vc%ru$#I;iTpN6)Zn@lh?AaXWr8H;!dCRSxM+3JSuM1bP>S+weSv& zz6Q&N&G}w5h!FPego9`l-ke+)UAVnBm*2SgL+*Xyh8pN=!&ScGY?TFSfNk6KOD\~bYhLKQdV=x>qz}2 zo`_tZYgA3wt{#{@ReM}fO$*PSUrrA8Mhy`N*bNhC77fOT=vwKu>=VTAX3^>frcv)e zjJfi}{pbg|a&~#r5jy=^V|t*ObPf2V-o?1d%Fo-24F3SkXRyaVo}FChAQ zsE?2|652_@D13vW+SGc8<{?HNh{LqJDq3<2dWMJ+Fba?Q6Y9)~mhFkq&`QUV2#T^` zby%sUK8VL4h{)y*qBck&42YEd8POZ_7Gex^(#vWMy|QmU?1GmWmRZsohc*8xDjS*& zHb-ob-eN)@wJQ23J539Bkswe#0TWSFN&)d6JZ7_*Jb4Ykkl)!o-@|x*0AD=%P(W}5 zIsp{vZF*h*POz9feiH!!eSeq)G1>#-`y<%UbY-#58dDjM%?+KdWw1V4ZZh@zS&-5r zLViL0A}(@_Ay9-A8jEzi zF~cW<6m2Y85!EJT>3lV;r6uje^}@0nz%Quk4HBBMYn=OlvSbR`{XyTP=(BipMDWSr z8V4ST`Kec^iw?8sSj-<&R>NhBQi{Y85_I9n!WFh&2?E1@yp{syXST)VfBG1jE+ zi`jRYi%?)A`fCW+Aa_ncPmg0&Sk)jOYJ*atI$bK@tC2-?z`5se`l7^G1)mTq zjKVJzAl9M9`&E26%JK-<+;=&*=r6a(LS{{blE-Z|oq(CwQ14irA+U_UL`1S#Dm?b=%uHBl-U8hx(K<_A@%`(jn&=Exb$W zC6i1h=d{k!F;nb}jT4y;S+7%4^dBLvCGIvED(10qDk=M%jXSbKn5@ZnT^I(h?G(#o z4IS->e@+X5luSOeFYF`ZgeRTK4(X1}NN17}*T!J+Er$zvrSHS*2L=R`GZ7A4fQfeo zMuYvGAv_#e9h8KD2-;bMeQg!&l+l5q^2fSVy#hw4z$gd~ysDfJ#Dz6hy40>la0iE~ z3=oJNPF+ogwnt#}bQU5?z+6~qBC+!zET|9F+@U^J#Ad9p0o(*mgfHN-=Eb9TL?k?+ zEeJ%7A?C#k1nLi*=rAMV8kH{=dpoTG8m}g-VGCF>mNQizMUpU!w?Y(Cg~j6WDlQ=Q zh^Qq@sNjjVGG=Q_o@v`u^fsCeq?f9in-lFZaBa!0+qPkI=c3OXPcbOND{6NYntScR z%5$Q>8k#DryqNzV>LpEvYCieOe3xGS_lrKgg~$FIYKrRbiQxQH8D&wJ(0}XQ)C*%& zJ41Brp*jlv+8TlV_R!^U71e3e None: + # Fetch the OpenAI key from the environment variables + self.OPEN_AI_KEY = os.getenv("OPEN_AI_KEY") + # # Set the OpenAI key + # openai.api_key = self.OPEN_AI_KEY + self.url = "https://api.openai.com/v1/chat/completions" + self.headers = { + "Content-Type": "application/json", + "Authorization": f"Bearer {self.OPEN_AI_KEY}", + } + + self.memory: list[dict] = None + + def generate(self, message): + + message = re.sub(" +", " ", message) + + if self.memory: + messages = self.memory + if messages[-1]["content"] != message: + messages.append({"role": "user", "content": message}) + else: + messages = [ + {"role": "system", "content": self.system_prompt}, + {"role": "user", "content": message}, + ] + + data = {"model": "gpt-3.5-turbo", "messages": messages} + # Print the last message in yellow + last_message = messages[-1]["content"] + print("\n\033[94m" + last_message + "\033[0m\n") + + response = requests.post(self.url, headers=self.headers, json=data).json() + answer = response["choices"][0]["message"] + + print("\033[95m" + answer["content"] + "\033[0m") + + return answer + + +class Chatbot(BaseBot): + def __init__(self) -> None: + system_prompt = botstrings.chatbot_system_prompt + super().__init__() # Inherit from BaseBot + # TODO Check if there is an older conversation with the same number. + # Set the system prompt and the first user message. 
+
+
+class Chatbot(BaseBot):
+    def __init__(self) -> None:
+        system_prompt = botstrings.chatbot_system_prompt
+        super().__init__()  # Inherit from BaseBot
+        # TODO Check if there is an older conversation with the same number.
+        # Set the system prompt and the first user message.
+        self.first_instructions_sent = False
+        self.informations_requested = False
+        self.memory = [{"role": "system", "content": system_prompt}]
+
+    def ask_for_info(self, report: Report, chatbot: "Chatbot"):
+        # Ask for information
+        looking_for = []
+        group_found = False
+        for group in report.info_groups:
+            if not group_found:
+                for info in group:
+                    if getattr(report, info) is None:
+                        group_found = True
+                        looking_for.append(info)
+
+        # Make sure this set of info has not already been asked for without an answer
+        if looking_for == report.looking_for:
+            for info in looking_for:
+                setattr(report, info, "Unanswered")
+            looking_for = []
+            group_found = False
+
+        if looking_for != []:
+            report.looking_for = looking_for
+            # Ask for the information
+            general_bot.memory = [
+                {"role": "user", "content": 'Formulate a question asking for the following information: ["name", "age"]'},
+                {"role": "assistant", "content": "Can you tell me about the name and age of what you've found?"},
+            ]
+            if looking_for == ["image"]:
+                question = 'image'
+            else:
+                prompt = f"Formulate a question asking for the following information: {looking_for}"
+                general_bot.memory.append({"role": "user", "content": prompt})
+                question = general_bot.generate(prompt)["content"]
+            return question
+
+        else:
+            return False, False
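Chatbot.ask_for_info and CheckerBot.check_for_info both rely on a Report object exposing info_groups, descriptions, looking_for and one attribute per field, but the Report class itself sits in the unrecovered part of this patch. A hypothetical sketch of a compatible shape, with made-up field names, only to show how those attributes are used:

# Hypothetical Report shape; the real class and its field names are not in the recovered hunk.
class Report:
    def __init__(self):
        self.appearance = None  # example field, read and written via getattr/setattr
        self.location = None    # example field
        self.image = None
        # Groups of fields that are requested together, in priority order.
        self.info_groups = [["appearance", "location"], ["image"]]
        # Plain-language descriptions used when extracting fields from a message.
        self.descriptions = {
            "appearance": "what the object looks like",
            "location": "where the object was found",
        }
        # Fields the bot is currently waiting on.
        self.looking_for = []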
+
+
+class CheckerBot(BaseBot):
+    def __init__(self) -> None:
+        self.system_prompt = botstrings.checker_bot_system_prompt
+        super().__init__()  # Inherit from BaseBot
+        self.history = []
+
+    def check_message_type(self, message):
+        prompt = f"""What kind of message is this?
+        '''{message}'''
+        Don't care about the content, only the type of message. \
+        Answer with any of the following: ["greeting", "question", "information", "statement"]\
+        """
+
+        result = self.generate(prompt)["content"]
+        if "greeting" in result.lower():
+            message_type = "greeting"
+        elif "question" in result.lower():
+            message_type = "question"
+        elif "information" in result.lower() or "statement" in result.lower():  # TODO should "statement" be here?
+            message_type = "information"
+        else:
+            message_type = "information"  # Fall back to "information" on unexpected model output
+
+        return message_type
+
+    def check_answer(self, answer, question, chatbot: Chatbot):
+        return True  # TODO We need to find a good way to check if the answer is an answer to the question.
+        question = question.replace("\n", " ")
+        answer = answer.replace("\n", " ")
+        if question == botstrings.first_instructions:
+            question = "What have you found?"
+
+        prompt = f'''
+        Have a look at this conversation:\n
+        """
+        {chat2string(chatbot.memory[-4:]).strip()}
+        """\n
+        Is the last message a reasonable answer to the last question ({question})?
+        Answer ONLY with any of the following: "True", "False", "Unclear"
+        '''.strip()
+        result = self.generate(prompt)["content"]
+
+        if "unclear" in result.lower():
+            prompt = f"""
+            A user is having a conversation with an assistant. This is the conversation:\n'''{chatbot.memory}'''\
+            Is the last message ('''{answer}''') an answer to the question ('{question}')?
+            Answer ONLY with any of the following: "True", "False", "Unclear"\
+            """
+            self.memory.append({"role": "user", "content": prompt})
+
+            result = self.generate(prompt)["content"]
+
+        if "true" in result.lower():
+            answered = True
+
+        elif "false" in result.lower() or "unclear" in result.lower():
+            answered = False
+
+        return answered
+
+    def check_for_info(self, user_message, report: Report, looking_for: list, n_try=0):
+        info_dict = {}
+        for info in looking_for:
+            if getattr(report, info) is None:
+                info_dict[info] = report.descriptions[info]
+        prompt = f"""
+        This is a message from a user: '''{user_message}'''\n
+        This is a dict describing what info I want:\n\n{info_dict} \n\n\
+        Take a look at the message and create a dictionary with the information that is requested.\
+        There might not be information available in the message for all fields. \
+        If you can't find information for a certain field, fill that with a python null value ("None" or "null").
+        Do not include any explanations, only provide a RFC8259 compliant JSON response following this format without deviation.\
+        """
+
+        n_try += 1
+        result = self.generate(prompt)
+
+        try:
+            json_content = json.loads(
+                result["content"]
+            )  # Parse the string back into a dictionary
+            for key, value in json_content.items():
+                if value not in ["null", "None", "", "Unknown"]:
+                    setattr(report, key, value)
+        except Exception:
+            try:
+                # Keep only the {...} part of the reply and try to parse that instead
+                result_content = result["content"]
+                result_content = result_content[
+                    result_content.find("{") : result_content.rfind("}") + 1
+                ]
+                json_content = json.loads(
+                    result_content
+                )  # Parse the string back into a dictionary
+                for key, value in json_content.items():
+                    if value not in ["null", "None", "", "Unknown"]:
+                        setattr(report, key, value)
+            except Exception:
+                if n_try > 3:
+                    return False
+                return self.check_for_info(user_message, report, looking_for, n_try=n_try)
+
+        return json_content
+
+    def check_for_tips(self, bot_message):
+        # Check if the message is a tip
+        prompt = f"""
+        An assistant wants to send this message to a user: '''{bot_message}'''\
+        Is the message a tip or recommendation on how to handle a suspicious object?
+        Answer ONLY with any of the following: "True", "False"\
+        """
+        result = self.generate(prompt)["content"]
+        print(result)
+        if "true" in result.lower():
+            is_tip = True
+        elif "false" in result.lower():
+            is_tip = False
+        else:
+            is_tip = False  # Treat unexpected answers as "not a tip"
+
+        return is_tip
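check_for_info asks the model for an RFC 8259-compliant JSON object and has to recover when the reply wraps that object in extra prose. The salvage step in isolation, as a minimal sketch with a made-up reply and no retries:

# Sketch of the fallback parse used in check_for_info: keep only the outermost {...} span.
import json

reply = 'Sure! Here is the JSON you asked for: {"name": "metal cylinder", "age": null}'
start, end = reply.find("{"), reply.rfind("}") + 1
data = json.loads(reply[start:end]) if start != -1 and end > start else None
print(data)  # {'name': 'metal cylinder', 'age': None}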
+
+
+class GeneralBot(BaseBot):
+    def __init__(self) -> None:
+        self.system_prompt = botstrings.general_bot_system_prompt
+        super().__init__()
+        self.memory = []
+
+
+def send(message, check=True):
+
+    # Check if the message is a tip.
+    if check:
+        tip = checker_bot.check_for_tips(message)
+    else:
+        tip = False
+
+    if tip:
+        return False
+    else:
+        print(message)  # TODO Send the message to the user
+        return True
+
+
+def chat2string(chat: list):
+    chat_string = ""
+    for message in chat:
+        chat_string += f"{message['role']}: {message['content']}\n"
+    return chat_string
+
+
+if __name__ == "__main__":
+
+    #! For testing
+    user_input = "I have found a strange looking thing on my lawn."
+
+    # Initialise the chatbot.
+    chatbot = Chatbot()  # TODO ? Add phone number, etc.
+    checker_bot = CheckerBot()
+    general_bot = GeneralBot()
+    report = Report()
+
+    while True:
+        # Check the message type
+        message_type = checker_bot.check_message_type(message=user_input)
+        print("Message type:", message_type)
+
+        if "greeting" in message_type.lower():
+            # Answer the greeting
+            bot_answer = chatbot.generate(user_input)
+            if not chatbot.first_instructions_sent:
+                # Give instructions for how to use the bot
+                chatbot.first_instructions_sent = True
+                chatbot.informations_requested = True
+                chatbot.memory.append(
+                    {"role": "system", "content": botstrings.first_instructions}
+                )
+            else:
+                chatbot.memory.append(bot_answer)
+
+        elif "question" in message_type.lower():
+            if not chatbot.first_instructions_sent:
+                # Give instructions for how to use the bot
+                chatbot.first_instructions_sent = True
+                chatbot.memory.append(
+                    {"role": "system", "content": botstrings.first_instructions}
+                )
+
+            bot_answer = chatbot.generate(user_input)  # TODO How to handle questions?
+
+            # Check if the answer is an answer to the question.
+            answered = checker_bot.check_answer(
+                bot_answer["content"], user_input, chatbot
+            )
+            if not answered:
+                bot_answer = "I could not understand your question. Please ask again."
+            send(bot_answer)
+            chatbot.memory.append({"role": "system", "content": bot_answer})
+
+        elif "information" in message_type.lower():
+            if not chatbot.first_instructions_sent:
+                # Give instructions for how to use the bot
+                chatbot.first_instructions_sent = True
+                chatbot.informations_requested = True
+                chatbot.memory.append(
+                    {"role": "system", "content": botstrings.first_instructions}
+                )
+                send(botstrings.first_instructions, check=False)
+            else:
+                if chatbot.informations_requested:
+                    answered = checker_bot.check_answer(
+                        user_input, chatbot.memory[-1]["content"], chatbot
+                    )
+                    if answered:
+                        # Extract the requested information and write it to the report
+                        result = checker_bot.check_for_info(
+                            user_input, report, report.looking_for
+                        )
+                        if result:
+                            pprint(result)
+                        else:
+                            send(
+                                "I could not understand your message. Please try again.",
+                                check=False,
+                            )
+                        looking_for = chatbot.ask_for_info(report, chatbot=chatbot)
+
+                    else:
+                        send(
+                            "I could not understand your message. Please try again.",
+                            check=False,
+                        )
+
+        else:
+            print("Unknown message type")
+
+        user_input = input(">>> ")
+
+        if os.path.isfile(user_input):
+            report.image = user_input
+        else:
+            print("User input is not a valid image file.")
+
+general_bot = GeneralBot()
diff --git a/bot_strings.py b/bot_strings.py
new file mode 100644
index 0000000..50db7ab
--- /dev/null
+++ b/bot_strings.py
@@ -0,0 +1,12 @@
+class BotStrings():
+    def __init__(self):
+        self.first_instructions = """
+        Hi! I'm a chatbot from UNMAS (United Nations Mine Action Service). Use me to report a suspicious object that you think might be a landmine or an explosive remnant of war.
+        """.strip()
+
+        self.chatbot_system_prompt = 'You are an assistant chatting with a user.'  # TODO Add instructions for how to answer and what not to answer.
+        self.checker_bot_system_prompt = 'A user is chatting with an assistant. You are checking the messages.'
+        self.general_bot_system_prompt = 'You are a bot used for finding information.'
+
+
+botstrings = BotStrings()
\ No newline at end of file
diff --git a/interface.py b/interface.py
deleted file mode 100644
index e69de29..0000000
diff --git a/streamlit_interface.py b/streamlit_interface.py
new file mode 100644
index 0000000..33d43a8
--- /dev/null
+++ b/streamlit_interface.py
@@ -0,0 +1,162 @@
+import streamlit as st
+import numpy as np
+from PIL import Image
+from bot import Chatbot, Report, CheckerBot, GeneralBot, botstrings
+from time import sleep
+from pprint import pprint
+
+
+def send(message, check=True, add_to_memory=True):
+    store_state()
+    # Check if the message is a tip.
+    if check:
+        tip = checker_bot.check_for_tips(message)
+    else:
+        tip = False
+
+    if tip:
+        print("☠️ IT'S A TIP ☠️")
+        return False
+    else:
+        with st.chat_message("assistant"):
+            st.markdown(message)
+        # Add assistant response to chat history
+        st.session_state.messages.append({"role": "assistant", "content": message})
+        if add_to_memory:
+            chatbot.memory.append({"role": "assistant", "content": message})
+        return True
+
+
+def store_state():
+    st.session_state.chatbot = chatbot
+    st.session_state.report = report
+
+
+st.title("UNMAS Bot")
+
+# Initialize chat history
+if "messages" not in st.session_state:
+    st.session_state.messages = []
+
+if 'upload_image' not in st.session_state:
+    st.session_state.upload_image = False
+
+# Load chatbot from session state or create a new one
+if "chatbot" not in st.session_state:
+    chatbot = Chatbot()
+    st.session_state.chatbot = chatbot
+else:
+    chatbot = st.session_state.chatbot
+
+# Load report from session state or create a new one
+if "report" not in st.session_state:
+    report = Report()
+    st.session_state.report = report
+else:
+    report = st.session_state.report
+
+# Load the checker bot from session state or create a new one
+if "checker_bot" not in st.session_state:
+    checker_bot = CheckerBot()
+    st.session_state.checker_bot = checker_bot
+else:
+    checker_bot = st.session_state.checker_bot
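Streamlit re-runs the whole script on every user interaction, which is why the chatbot, report and checker objects are stashed in st.session_state and why store_state() is called after they are mutated. A minimal sketch of that rerun/session-state pattern on its own:

# Sketch only: values kept in st.session_state survive reruns, ordinary locals do not.
import streamlit as st

if "counter" not in st.session_state:
    st.session_state.counter = 0  # initialised once per browser session

if st.button("Increment"):        # every click triggers a full script rerun
    st.session_state.counter += 1

st.write("Clicks:", st.session_state.counter)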
+
+
+# Display chat messages from history on app rerun
+for message in st.session_state.messages:
+    with st.chat_message(message["role"]):
+        st.markdown(message["content"])
+
+
+if st.session_state.upload_image:
+    user_input = None
+    img_file = st.file_uploader('Upload an image', type=['png', 'jpg', 'jpeg'])
+
+    if img_file is not None:
+        image = Image.open(img_file)
+        img_array = np.array(image)
+        report.image = img_array
+        send('Thanks!', check=False, add_to_memory=False)
+        question = chatbot.ask_for_info(report, chatbot=chatbot)
+        send(question, check=False)
+        st.session_state.upload_image = False
+        store_state()
+
+    # user_input = st.chat_input('')
+    # if user_input:
+    #     st.chat_message("user").markdown(user_input)
+    #     chatbot.memory.append({"role": "user", "content": user_input})
+    #     st.session_state.messages.append({"role": "user", "content": user_input})
+
+else:
+    user_input = st.chat_input('')
+
+
+if user_input and not st.session_state.upload_image:
+
+    st.chat_message("user").markdown(user_input)
+    # Add user message to chat history
+    st.session_state.messages.append({"role": "user", "content": user_input})
+    chatbot.memory.append({"role": "user", "content": user_input})
+
+    # Check the message type
+    print('Check message type')
+    message_type = checker_bot.check_message_type(message=user_input)
+    print('Message type:', message_type)
+
+    if not chatbot.first_instructions_sent:
+        # Give instructions for how to use the bot
+        chatbot.first_instructions_sent = True
+        chatbot.informations_requested = True
+        send(botstrings.first_instructions, check=False, add_to_memory=False)
+        sleep(1.2)
+        send('So first, tell me what you found?', check=False)
+
+    else:
+        if 'greeting' in message_type.lower():
+            # Answer the greeting
+            bot_answer = chatbot.generate(user_input)
+            send(bot_answer['content'])
+
+        elif 'question' in message_type.lower():
+
+            bot_answer = chatbot.generate(user_input)  # TODO How to handle questions?
+
+            # Check if the answer is an answer to the question.
+            answered = checker_bot.check_answer(bot_answer['content'], user_input, chatbot)
+            if not answered:
+                bot_answer = 'I could not understand your question. Please ask again.'
+            send(bot_answer)
+            chatbot.memory.append({"role": "assistant", "content": bot_answer})
+
+        elif 'information' in message_type.lower():
+
+            if chatbot.informations_requested:
+                answered = checker_bot.check_answer(user_input, chatbot.memory[-1]['content'], chatbot)
+                if answered:
+                    # Extract the requested information and write it to the report
+                    result = checker_bot.check_for_info(user_input, report, report.looking_for)
+                    if result:
+                        print(result)
+                    else:
+                        send('I could not understand your message. Please try again.', check=False)
+                    question = chatbot.ask_for_info(report, chatbot=chatbot)
+
+                    if question == 'image':
+                        question = "Can you upload a picture of what you have found?"
+                        st.session_state.upload_image = True
+                        print(st.session_state.upload_image)
+                        send(question, check=False, add_to_memory=False)
+                        st.rerun()
+                    else:
+                        send(question, check=False)
+
+                else:
+                    send('I could not understand your message. Please try again.', check=False)
+
+        else:
+            send('I could not understand your message. Please try again.', check=False)
+
+    store_state()
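To try the proof of concept, the UI would be started with "streamlit run streamlit_interface.py" (with OPEN_AI_KEY available in the environment or a local .env), while "python bot.py" runs the console test loop at the bottom of bot.py. A hedged smoke-test sketch of the bot classes on their own, assuming the unrecovered Report class initialises looking_for:

# Sketch only: exercises the bot.py classes without the Streamlit UI (needs OPEN_AI_KEY).
from bot import Chatbot, CheckerBot, Report

chatbot, checker_bot, report = Chatbot(), CheckerBot(), Report()
message = "I found a rusty metal object half-buried next to the road."

print(checker_bot.check_message_type(message))        # expected: "information"
print(chatbot.ask_for_info(report, chatbot=chatbot))  # the first question the bot would ask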