dinethja committed (verified)
Commit bcc3787 · 1 Parent(s): 681c3a9
Files changed (1)
  1. app.py +115 -0
app.py ADDED
@@ -0,0 +1,115 @@
+ import streamlit as st
+ import requests
+ import logging
+
+ # Configure logging
+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+
+ # Page configuration
+ st.set_page_config(
+     page_title="DeepSeek Chatbot - ruslanmv.com",
+     page_icon="🤖",
+     layout="centered"
+ )
+
+ # Initialize session state for chat history
+ if "messages" not in st.session_state:
+     st.session_state.messages = []
+
+ # Sidebar configuration
+ with st.sidebar:
+     st.header("Model Configuration")
+     # st.markdown("[Get HuggingFace Token](https://huggingface.co/settings/tokens)")
+
+     # Dropdown to select model
+     model_options = [
+         "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
+         # "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
+         # "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
+         # "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
+         # "deepseek-ai/DeepSeek-R1-Distill-Llama-8B"
+     ]
+     selected_model = st.selectbox("Select Model", model_options, index=0)
+
+     system_message = st.text_area(
+         "System Message",
+         value="You are a friendly Chatbot created by ruslanmv.com",
+         height=100
+     )
+
+     max_tokens = st.slider(
+         "Max Tokens",
+         1, 4000, 512
+     )
+
+     temperature = st.slider(
+         "Temperature",
+         0.1, 4.0, 0.7
+     )
+
+     top_p = st.slider(
+         "Top-p",
+         0.1, 1.0, 0.9
+     )
+
+ # Function to query the Hugging Face API
+ def query(payload, api_url):
+     headers = {"Authorization": f"Bearer {st.secrets['HF_TOKEN']}"}
+     logger.info(f"Sending request to {api_url} with payload: {payload}")
+     response = requests.post(api_url, headers=headers, json=payload)
+     logger.info(f"Received response: {response.status_code}, {response.text}")
+     return response.json()
+
+ # Chat interface
+ st.title("🤖 DeepSeek Chatbot")
+ st.caption("Powered by Hugging Face Inference API - Configure in sidebar")
+
+ # Display chat history
+ for message in st.session_state.messages:
+     with st.chat_message(message["role"]):
+         st.markdown(message["content"])
+
+ # Handle input
+ if prompt := st.chat_input("Type your message..."):
+     st.session_state.messages.append({"role": "user", "content": prompt})
+
+     with st.chat_message("user"):
+         st.markdown(prompt)
+
+     try:
+         with st.spinner("Generating response..."):
+             # Prepare the payload for the API
+             payload = {
+                 "inputs": prompt,
+                 "parameters": {
+                     "max_new_tokens": max_tokens,
+                     "temperature": temperature,
+                     "top_p": top_p,
+                     "return_full_text": False
+                 }
+             }
+
+             # Dynamically construct the API URL based on the selected model
+             api_url = f"https://api-inference.huggingface.co/models/{selected_model}"
+             logger.info(f"Selected model: {selected_model}, API URL: {api_url}")
+
+             # Query the Hugging Face API using the selected model
+             output = query(payload, api_url)
+
+             # Handle API response
+             if isinstance(output, list) and len(output) > 0 and 'generated_text' in output[0]:
+                 assistant_response = output[0]['generated_text']
+                 logger.info(f"Generated response: {assistant_response}")
+
+                 with st.chat_message("assistant"):
+                     st.markdown(assistant_response)
+
+                 st.session_state.messages.append({"role": "assistant", "content": assistant_response})
+             else:
+                 logger.error(f"Unexpected API response: {output}")
+                 st.error("Error: Unable to generate a response. Please try again.")
+
+     except Exception as e:
+         logger.error(f"Application Error: {str(e)}", exc_info=True)
+         st.error(f"Application Error: {str(e)}")