second try at front end because of sandip #2

Closed
lakhia13 wants to merge 5 commits from fndev into main
3 changed files with 136 additions and 0 deletions

app.py Normal file (18 additions)

@@ -0,0 +1,18 @@
import streamlit as st
import langchain  # imported by the PR but not used in this file

st.title('Ask maargdarshak')

# Initialize the chat history once per session
if 'messages' not in st.session_state:
    st.session_state.messages = []

# Replay the conversation so far
for message in st.session_state.messages:
    st.chat_message(message['role']).markdown(message['content'])

# Read the next user prompt and record it
prompt = st.chat_input("Ask your question here")
if prompt:
    st.chat_message('user').markdown(prompt)
    st.session_state.messages.append({'role': 'user', 'content': prompt})
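
Review note: app.py records the user message but never renders a reply. A minimal sketch of closing the loop by reusing query_ollama from app_response.py in this PR (the cross-file import is an assumption about the intended wiring, not something the PR does itself):

# Continuation sketch for app.py: generate and persist an assistant reply.
# Importing query_ollama from app_response.py is an assumed wiring.
from app_response import query_ollama

if prompt:
    answer = query_ollama(prompt)
    st.chat_message('assistant').markdown(answer)
    st.session_state.messages.append({'role': 'assistant', 'content': answer})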

app_response.py Normal file (70 additions)

@@ -0,0 +1,70 @@
import streamlit as st
import requests


# Streamlit app
def chatbot():
    st.title("Chatbot powered by Ollama")

    # Initialize session state for storing the chat history
    if "chat_history" not in st.session_state:
        st.session_state["chat_history"] = []

    # User input section
    user_input = st.text_input("You: ", "")
    if user_input:
        # Note: text_input keeps its value across reruns, so this block can
        # re-append duplicate entries; st.chat_input or a form avoids that.
        response = query_ollama(user_input)

        # Append the conversation to the chat history
        st.session_state["chat_history"].append({"role": "user", "text": user_input})
        st.session_state["chat_history"].append({"role": "bot", "text": response})

    # Display the chat history
    if st.session_state["chat_history"]:
        for chat in st.session_state["chat_history"]:
            if chat["role"] == "user":
                st.write(f"**You:** {chat['text']}")
            else:
                st.write(f"**Bot:** {chat['text']}")


# Function to query Ollama locally
def query_ollama(prompt):
    # Assumes the Ollama API is running locally on the default port 11434
    url = "http://localhost:11434/api/generate"
    payload = {
        "model": "llama2",  # Change the model name if necessary
        "prompt": prompt,
        "stream": False,  # Return one JSON object instead of a stream of chunks
    }
    headers = {"Content-Type": "application/json"}
    try:
        response = requests.post(url, json=payload, headers=headers)

        # Log the status code and response text for debugging
        print(f"Status Code: {response.status_code}")
        print(f"Response Text: {response.text}")

        # Check if the response is successful
        if response.status_code == 200:
            try:
                response_json = response.json()
                # Ollama's /api/generate puts the generated text under "response"
                return response_json.get("response", "Sorry, I couldn't generate a response.")
            except ValueError:
                st.error("The server returned an invalid response. Please check the API.")
                return "Invalid response format"
        else:
            st.error(f"Error from API: {response.status_code}")
            return f"Error: {response.status_code}"
    except requests.exceptions.RequestException as e:
        st.error(f"Request failed: {e}")
        return "Failed to communicate with the server."


if __name__ == "__main__":
    chatbot()
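
Review note: to sanity-check the endpoint query_ollama depends on without running Streamlit, a minimal standalone sketch (assumes `ollama serve` is running locally and the llama2 model has been pulled):

# Standalone smoke test for the Ollama endpoint used by query_ollama.
import requests

resp = requests.post(
    "http://localhost:11434/api/generate",
    json={"model": "llama2", "prompt": "Say hello in one sentence.", "stream": False},
    timeout=60,
)
resp.raise_for_status()
print(resp.json().get("response", "<no 'response' field in reply>"))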

cookies.py Normal file (48 additions)

@@ -0,0 +1,48 @@
import streamlit as st
import time
from streamlit_cookies_manager import EncryptedCookieManager

# The password is the encryption key for the cookies and should be kept secure.
cookie_manager = EncryptedCookieManager(
    prefix="myapp_",  # Prefix cookie names to avoid conflicts with other apps
    password="your_secure_cookie_password"  # Replace with your secure password
)

# Wait until the component has loaded the cookies from the browser
if not cookie_manager.ready():
    st.stop()


def get_user_timestamp():
    # Check if a timestamp already exists in the cookies
    timestamp = cookie_manager.get("user_timestamp")
    if timestamp:
        st.write(f"Welcome back! Your session started at: {timestamp}")
        return timestamp
    else:
        # Generate a new timestamp in milliseconds
        new_timestamp = str(int(time.time() * 1000))
        # EncryptedCookieManager is dict-like: assign, then save() to persist
        cookie_manager["user_timestamp"] = new_timestamp
        cookie_manager.save()
        st.write(f"New session started! Your timestamp is: {new_timestamp}")
        return new_timestamp


# Chatbot function
def chatbot():
    st.title("Chatbot with Timestamps in Cookies")

    # Retrieve or create the timestamp for this user
    timestamp = get_user_timestamp()

    # Display the chat prompt
    user_input = st.text_input("You: ", "")
    if user_input:
        # Here, instead of querying Ollama, we simulate a chatbot response
        st.write(f"**Bot:** You said '{user_input}' at {timestamp}")


# Main
if __name__ == "__main__":
    chatbot()
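
Review note: the dict-style cookie interface used above takes no per-cookie max_age, so the 7-day lifetime the original code intended can be enforced in application code instead. A minimal sketch using the stored millisecond timestamp (the helper name is hypothetical, not part of this PR):

# Sketch: enforce a 7-day session lifetime in application code, since the
# dict-style cookie interface above takes no per-cookie max_age.
SEVEN_DAYS_MS = 7 * 24 * 3600 * 1000

def get_fresh_timestamp():  # hypothetical helper
    stamp = cookie_manager.get("user_timestamp")
    now_ms = int(time.time() * 1000)
    if stamp is None or now_ms - int(stamp) > SEVEN_DAYS_MS:
        # Missing or older than 7 days: start a new session
        stamp = str(now_ms)
        cookie_manager["user_timestamp"] = stamp
        cookie_manager.save()
    return stamp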