From 73cad6757b1299068b95917bcbd8c6f47d23622f Mon Sep 17 00:00:00 2001
From: Akash Gupta
Date: Mon, 8 Sep 2025 19:32:08 +0530
Subject: [PATCH] add chat streamlit

---
 chat-llm-streamlit/app.py           | 53 ++++++++++++++++++++++++++++
 chat-llm-streamlit/requirements.txt |  3 +++
 2 files changed, 56 insertions(+)
 create mode 100644 chat-llm-streamlit/app.py
 create mode 100644 chat-llm-streamlit/requirements.txt

diff --git a/chat-llm-streamlit/app.py b/chat-llm-streamlit/app.py
new file mode 100644
index 0000000..36ec11b
--- /dev/null
+++ b/chat-llm-streamlit/app.py
@@ -0,0 +1,53 @@
+import os
+
+import streamlit as st
+from dotenv import load_dotenv
+from openai import OpenAI
+
+load_dotenv()
+
+st.title("ChatGPT-like clone")
+
+# Configuration comes from the environment (.env supported via python-dotenv).
+OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
+OPENAI_BASE_URL = os.getenv("OPENAI_BASE_URL")  # None -> library default endpoint
+OPENAI_MODEL = os.getenv("OPENAI_MODEL")
+
+# Fail fast with a visible message instead of an opaque API traceback on the
+# first chat request (the API rejects model=None / an empty key).
+if not OPENAI_API_KEY:
+    st.error("OPENAI_API_KEY is not set. Add it to your environment or a .env file.")
+    st.stop()
+if not OPENAI_MODEL:
+    st.error("OPENAI_MODEL is not set. Add it to your environment or a .env file.")
+    st.stop()
+
+client = OpenAI(api_key=OPENAI_API_KEY, base_url=OPENAI_BASE_URL)
+
+if "openai_model" not in st.session_state:
+    st.session_state["openai_model"] = OPENAI_MODEL
+
+if "messages" not in st.session_state:
+    st.session_state.messages = []
+
+# Replay prior turns so the conversation survives Streamlit reruns.
+for message in st.session_state.messages:
+    with st.chat_message(message["role"]):
+        st.markdown(message["content"])
+
+if prompt := st.chat_input("What is up?"):
+    st.session_state.messages.append({"role": "user", "content": prompt})
+    with st.chat_message("user"):
+        st.markdown(prompt)
+
+    with st.chat_message("assistant"):
+        stream = client.chat.completions.create(
+            model=st.session_state["openai_model"],
+            messages=[
+                {"role": m["role"], "content": m["content"]}
+                for m in st.session_state.messages
+            ],
+            stream=True,
+        )
+        response = st.write_stream(stream)
+    st.session_state.messages.append({"role": "assistant", "content": response})
diff --git a/chat-llm-streamlit/requirements.txt b/chat-llm-streamlit/requirements.txt
new file mode 100644
index 0000000..aa45bd8
--- /dev/null
+++ b/chat-llm-streamlit/requirements.txt
@@ -0,0 +1,3 @@
+streamlit
+openai
+python-dotenv