ObindiG committed
Commit 516f036 · verified · 1 Parent(s): 92fa0f1

Update app.py

Files changed (1)
  1. app.py +29 -74
app.py CHANGED
@@ -1,9 +1,12 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
 
-# Initialize the inference client
+"""
+For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
+"""
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
+
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -12,98 +15,50 @@ def respond(
     temperature,
     top_p,
 ):
-    # Hardcoded system message in Swahili with ownership details
-    system_message = (
-        "Jina lako ni Sema. Wewe ni chatbot wa Kiswahili wa Laocta Tech Labs. "
-        "Laocta Tech Labs ni kampuni ya AI iliyoko Kenya. "
-        "Unaweza kujifunza zaidi kuhusu sisi kwenye: https://www.laoctatechlabs.com\n\n"
-        "Jibu kila swali kwa lugha ya Kiswahili kwa ufasaha na usahihi. "
-        "Kama swali ni kwa Kiingereza, jibu kwa Kiswahili. "
-        "Tumia lugha rahisi na yenye heshima kila wakati."
-    )
-
-    # Build conversation history
     messages = [{"role": "system", "content": system_message}]
-
-    for user_msg, bot_msg in history:
-        if user_msg:
-            messages.append({"role": "user", "content": user_msg})
-        if bot_msg:
-            messages.append({"role": "assistant", "content": bot_msg})
-
+
+    for val in history:
+        if val[0]:
+            messages.append({"role": "user", "content": val[0]})
+        if val[1]:
+            messages.append({"role": "assistant", "content": val[1]})
+
     messages.append({"role": "user", "content": message})
 
-    # Stream the response
     response = ""
-    for chunk in client.chat_completion(
+
+    for message in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
-        token = chunk.choices[0].delta.content
+        token = message.choices[0].delta.content
+
        response += token
        yield response
 
-# Custom CSS for Laocta branding
-custom_css = """
-footer {visibility: hidden}
-.gradio-container {font-family: "Swahili Sans", Arial, sans-serif}
-"""
 
-# Create the ChatInterface
+"""
+For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
+"""
 demo = gr.ChatInterface(
-    fn=respond,
-    chatbot=gr.Chatbot(
-        label="Sema",
-        bubble_user=True,
-        avatar_images=("user.png", "laocta_logo.png")
-    ),
+    respond,
    additional_inputs=[
-        gr.Textbox(
-            value="Sema - Chatbot wa Kiswahili wa Laocta Tech Labs",
-            label="Mpangilio wa Mfumo",
-            interactive=False
-        ),
-        gr.Slider(
-            minimum=1, maximum=2048, value=512, step=1,
-            label="Idadi ya Herufi (512)"
-        ),
+        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
+        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
+        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
-            minimum=0.1, maximum=4.0, value=0.7, step=0.1,
-            label="Kiwango cha Ubunifu (0.7)"
-        ),
-        gr.Slider(
-            minimum=0.1, maximum=1.0, value=0.95, step=0.05,
-            label="Usahihi wa Majibu (0.95)"
+            minimum=0.1,
+            maximum=1.0,
+            value=0.95,
+            step=0.05,
+            label="Top-p (nucleus sampling)",
        ),
    ],
-    title="Sema - Chatbot wa Laocta Tech Labs",
-    description="""
-    <div style='text-align: center'>
-        <img src='https://www.laoctatechlabs.com/logo.png' width=150>
-        <h2>Karibu kwa Sema</h2>
-        <p>Chatbot wa Kiswahili wa Laocta Tech Labs</p>
-        <p><a href='https://www.laoctatechlabs.com' target='_blank'>Tembelea tovuti yetu</a></p>
-    </div>
-    """,
-    submit_btn="Tuma",
-    retry_btn="Jaribu Tena",
-    undo_btn="Ondoa",
-    clear_btn="Futa Mazungumzo",
-    examples=[
-        ["Habari yako?"],
-        ["Laocta Tech Labs ni nini?"],
-        ["Unaweza kunisaidia na nini?"]
-    ],
-    css=custom_css
 )
 
+
 if __name__ == "__main__":
-    demo.launch(
-        server_name="0.0.0.0",
-        server_port=7860,
-        share=True,
-        favicon_path="laocta_favicon.ico"
-    )
+    demo.launch()
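
Note on the new streaming loop: the incoming code appends message.choices[0].delta.content straight to response, which assumes every stream chunk carries text; with some backends the delta content may be None (for example a role-only or final chunk), in which case response += token would raise a TypeError. A minimal standalone sketch of the same loop with a guard, using the same model and keyword arguments as the committed code (calls the hosted Inference API, so network access is required):

    from huggingface_hub import InferenceClient

    client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

    messages = [
        {"role": "system", "content": "You are a friendly Chatbot."},
        {"role": "user", "content": "Habari yako?"},
    ]

    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=512,
        stream=True,
        temperature=0.7,
        top_p=0.95,
    ):
        token = chunk.choices[0].delta.content
        if token:  # skip chunks whose delta carries no text
            response += token
    print(response)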
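
For reference, gr.ChatInterface passes the components in additional_inputs to the callback positionally after message and history, so the Textbox and three Sliders above supply system_message, max_tokens, temperature and top_p in that order (the two signature lines outside the hunk context presumably declare system_message and max_tokens). A quick way to exercise respond outside the UI, as a sketch that mirrors the default values and still calls the hosted Inference API:

    # Assumes the post-commit app.py is importable as `app`.
    from app import respond

    history = []  # list of (user, assistant) tuples, as ChatInterface supplies it
    final = ""
    for partial in respond(
        "Habari yako?",                  # message
        history,                         # history
        "You are a friendly Chatbot.",   # system_message (Textbox)
        512,                             # max_tokens (Slider)
        0.7,                             # temperature (Slider)
        0.95,                            # top_p (Slider)
    ):
        final = partial                  # each yield is the text accumulated so far
    print(final)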