"""Conversational Q&A chatbot: a Streamlit UI backed by a Groq-hosted llama3 model.

Conversation history lives in ``st.session_state['flowmessages']`` so it
survives Streamlit's rerun-on-interaction execution model.
"""

import os

import streamlit as st
from langchain.schema import HumanMessage, SystemMessage, AIMessage
from langchain.chat_models import ChatOpenAI  # NOTE(review): unused — consider removing
from langchain_groq import ChatGroq

from constant import Groq_Api_Key

## Streamlit UI
st.set_page_config(page_title="Conversational Q&A Chatbot")
st.header("Hey, Let's Chat")

# langchain_groq reads the key from the environment.
os.environ["GROQ_API_KEY"] = Groq_Api_Key

chat = ChatGroq(temperature=0.5, model="llama3-8b-8192")

# Seed the conversation once per session; reruns reuse the stored history.
if 'flowmessages' not in st.session_state:
    st.session_state['flowmessages'] = [
        SystemMessage(content="you are an Ai assistant answer every question by giving link with heading, make sure the link is valid to use")
    ]


## Function to query the Groq chat model and get responses
def get_chatmodel_response(question: str) -> str:
    """Append *question* to the session history, query the model, and return its reply.

    Side effects: appends both the HumanMessage and the model's AIMessage to
    ``st.session_state['flowmessages']``, so the full dialogue is sent on
    every call.
    """
    st.session_state['flowmessages'].append(HumanMessage(content=question))
    # .invoke() is the supported call style; bare chat(...) is deprecated.
    answer = chat.invoke(st.session_state['flowmessages'])
    st.session_state['flowmessages'].append(AIMessage(content=answer.content))
    return answer.content


input_text = st.text_input("Input: ", key="input")
submit = st.button("Ask the question")

## If ask button is clicked
if submit and input_text:
    response = get_chatmodel_response(input_text)
    st.subheader("The Response is")
    st.write(response)