vishalkatheriya committed on
Commit d22c8d7 · verified · 1 Parent(s): 3484fed

Update app.py

Files changed (1)
  1. app.py +138 -47
app.py CHANGED
@@ -1,29 +1,126 @@
  import streamlit as st
- from transformers import pipeline
  from googlesearch import search
 
- # Function to load the model
- @st.cache_resource
- def load_model():
-     model_name = "nisten/Biggie-SmoLlm-0.15B-Base" # Replace with your preferred model
-     generator = pipeline("text-generation", model=model_name)
-     return generator
 
- # Load the model once
- generator = load_model()
 
- # Function to process the query using the open-source LLM for general chat
- def chat_with_llm(query):
-     prompt = f"User: {query}\nAssistant:"
-     response = generator(prompt, max_length=100, num_return_sequences=1)
-     return response[0]['generated_text'].strip()
 
  # Function to process the query for search intent
  def process_query_with_llm(query):
-     prompt = f"User asked: '{query}'. What would be the best search query to use?"
-     response = generator(prompt, max_length=50, num_return_sequences=1)
-     st.write("query passing for search",response[0]['generated_text'].strip())
-     return response[0]['generated_text'].strip()
 
  # Function to perform a Google search using the googlesearch-python package
  def search_web(query):
@@ -35,40 +132,27 @@ def search_web(query):
  # Streamlit UI
  st.title("Interactive Chatbot")
 
  # Input field for user query
  user_input = st.text_input("You:", "")
 
  if user_input:
      st.write(f"**You:** {user_input}")
      search_phrases = [
-         "search",
-         "find",
-         "get me",
-         "give me",
-         "look up",
-         "show me",
-         "retrieve",
-         "browse",
-         "where can I find",
-         "search for",
-         "look for",
-         "can you find",
-         "find me",
-         "what is",
-         "how to",
-         "who is",
-         "where is",
-         "what are",
-         "tell me about",
-         "do you know",
-         "could you find",
-         "can you search",
-         "help me find",
-         "explore",
-         "fetch",
-         "locate",
-         "suggest me",
-         "suggest"]
 
      # Determine if the query is a search or a general chat
      if any(keyword in user_input.lower() for keyword in search_phrases):
@@ -88,5 +172,12 @@ if user_input:
              st.write("Sorry, I couldn't find any relevant links.")
      else:
          # Handle general conversation
-         response = chat_with_llm(user_input)
          st.write(f"**Chatbot:** {response}")
 
+ # import streamlit as st
+ # from transformers import pipeline
+ # from googlesearch import search
+
+ # # Function to load the model
+ # @st.cache_resource
+ # def load_model():
+ #     model_name = "nisten/Biggie-SmoLlm-0.15B-Base" # Replace with your preferred model
+ #     generator = pipeline("text-generation", model=model_name)
+ #     return generator
+
+ # # Load the model once
+ # generator = load_model()
+
+ # # Function to process the query using the open-source LLM for general chat
+ # def chat_with_llm(query):
+ #     prompt = f"User: {query}\nAssistant:"
+ #     response = generator(prompt, max_length=100, num_return_sequences=1)
+ #     return response[0]['generated_text'].strip()
+
+ # # Function to process the query for search intent
+ # def process_query_with_llm(query):
+ #     prompt = f"User asked: '{query}'. What would be the best search query to use?"
+ #     response = generator(prompt, max_length=50, num_return_sequences=1)
+ #     st.write("query passing for search",response[0]['generated_text'].strip())
+ #     return response[0]['generated_text'].strip()
+
+ # # Function to perform a Google search using the googlesearch-python package
+ # def search_web(query):
+ #     search_results = []
+ #     for result in search(query, num_results=10):
+ #         search_results.append(result)
+ #     return search_results
+
+ # # Streamlit UI
+ # st.title("Interactive Chatbot")
+
+ # # Input field for user query
+ # user_input = st.text_input("You:", "")
+
+ # if user_input:
+ #     st.write(f"**You:** {user_input}")
+ #     search_phrases = [
+ #         "search",
+ #         "find",
+ #         "get me",
+ #         "give me",
+ #         "look up",
+ #         "show me",
+ #         "retrieve",
+ #         "browse",
+ #         "where can I find",
+ #         "search for",
+ #         "look for",
+ #         "can you find",
+ #         "find me",
+ #         "what is",
+ #         "how to",
+ #         "who is",
+ #         "where is",
+ #         "what are",
+ #         "tell me about",
+ #         "do you know",
+ #         "could you find",
+ #         "can you search",
+ #         "help me find",
+ #         "explore",
+ #         "fetch",
+ #         "locate",
+ #         "suggest me",
+ #         "suggest"]
+
+ #     # Determine if the query is a search or a general chat
+ #     if any(keyword in user_input.lower() for keyword in search_phrases):
+ #         # If the user input indicates a search intent
+ #         search_query = process_query_with_llm(user_input)
+ #         st.write(f"**Processed Query:** {search_query}")
+
+ #         # Search the web using the processed query
+ #         links = search_web(search_query)
+
+ #         # Display the search results
+ #         if links:
+ #             st.write("Here are some links you might find useful:")
+ #             for idx, link in enumerate(links):
+ #                 st.write(f"{idx + 1}. {link}")
+ #         else:
+ #             st.write("Sorry, I couldn't find any relevant links.")
+ #     else:
+ #         # Handle general conversation
+ #         response = chat_with_llm(user_input)
+ #         st.write(f"**Chatbot:** {response}")
  import streamlit as st
+ from huggingface_hub import InferenceClient
  from googlesearch import search
 
+ # Initialize the InferenceClient with the model and token only once
+ if 'client' not in st.session_state:
+     st.session_state.client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
 
+ client = st.session_state.client
 
+ # Function to get chat completion from the model
+ def chat_with_llm(messages):
+     try:
+         response_stream = client.chat_completion(messages=messages, stream=True, max_tokens=500)
+         delta_content = ""
+
+         for message in response_stream:
+             if 'choices' in message and message['choices']:
+                 delta_content += message['choices'][0]['delta'].get('content', '')
+
+         return delta_content.strip()
+
+     except Exception as e:
+         return f"An error occurred: {e}"
 
  # Function to process the query for search intent
  def process_query_with_llm(query):
+     messages = [{"role": "user", "content": f"User asked: '{query}'. What would be the best search query to use?"}]
+     response = chat_with_llm(messages)
+     st.write("Query passing for search:", response)
+     return response
 
  # Function to perform a Google search using the googlesearch-python package
  def search_web(query):
 
  # Streamlit UI
  st.title("Interactive Chatbot")
 
+ # Initialize message history if not already done
+ if 'message_history' not in st.session_state:
+     st.session_state.message_history = []
+
  # Input field for user query
  user_input = st.text_input("You:", "")
 
+ # Check if the input field is not empty
  if user_input:
      st.write(f"**You:** {user_input}")
+
      search_phrases = [
+         "search", "find", "get me", "give me", "look up", "show me", "retrieve",
+         "browse", "where can I find", "search for", "look for", "can you find",
+         "find me", "what is", "how to", "who is", "where is", "what are",
+         "tell me about", "do you know", "could you find", "can you search",
+         "help me find", "explore", "fetch", "locate", "suggest me", "suggest"
+     ]
+
+     # Update message history with user input
+     st.session_state.message_history.append({"role": "user", "content": user_input})
 
      # Determine if the query is a search or a general chat
      if any(keyword in user_input.lower() for keyword in search_phrases):
 
              st.write("Sorry, I couldn't find any relevant links.")
      else:
          # Handle general conversation
+         response = chat_with_llm(st.session_state.message_history)
          st.write(f"**Chatbot:** {response}")
+
+         # Update message history with the response
+         st.session_state.message_history.append({"role": "assistant", "content": response})
+
+         # Ensure input field is cleared after processing
+         st.text_input("You:", "", key="user_input")
+
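
For reference, the streaming pattern this commit introduces can be exercised outside Streamlit with a minimal sketch like the one below. This is an illustration, not part of the commit: it assumes huggingface_hub is installed, that a token with access to the gated meta-llama/Meta-Llama-3-8B-Instruct model is available (e.g. via the HF_TOKEN environment variable or a cached login), and the prompt text is made up.

# Standalone sketch of the chat_completion streaming loop used in the new app.py.
# Assumptions: huggingface_hub installed, HF token available in the environment,
# and access granted to the gated Llama 3 model; the prompt is illustrative only.
from huggingface_hub import InferenceClient

client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
messages = [{"role": "user", "content": "What would be a good search query for learning Streamlit?"}]

reply = ""
for chunk in client.chat_completion(messages=messages, stream=True, max_tokens=500):
    # Each streamed chunk carries an incremental delta; content may be empty or None.
    piece = chunk.choices[0].delta.content
    if piece:
        reply += piece

print(reply.strip())

Caching the InferenceClient in st.session_state, as the diff does, avoids rebuilding the client on every Streamlit rerun; the sketch above simply constructs it once because it runs as a plain script.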