Canstralian committed on
Commit
fe55ee3
·
verified ·
1 Parent(s): ac0c71d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +49 -18
app.py CHANGED
@@ -1,6 +1,17 @@
1
  import gradio as gr
 
 
2
 
3
- python_code = """
 
 
 
 
 
 
 
 
 
4
  def fib(n):
5
  if n <= 0:
6
  return 0
@@ -8,37 +19,57 @@ def fib(n):
8
  return 1
9
  else:
10
  return fib(n-1) + fib(n-2)
11
- """
12
-
13
- js_code = """
14
  function fib(n) {
15
  if (n <= 0) return 0;
16
  if (n === 1) return 1;
17
  return fib(n - 1) + fib(n - 2);
18
  }
19
  """
 
 
20
 
21
- def chat(message, history):
22
- if "python" in message.lower():
23
- return "Type Python or JavaScript to see the code.", gr.Code(language="python", value=python_code)
24
- elif "javascript" in message.lower():
25
- return "Type Python or JavaScript to see the code.", gr.Code(language="javascript", value=js_code)
 
 
 
 
 
 
 
 
 
 
 
 
26
  else:
27
- return "Please ask about Python or JavaScript.", None
28
 
 
29
  with gr.Blocks() as demo:
30
- code = gr.Code(render=False)
31
  with gr.Row():
32
  with gr.Column():
33
- gr.Markdown("<center><h1>Write Python or JavaScript</h1></center>")
34
  gr.ChatInterface(
35
- chat,
36
- examples=["Python", "JavaScript"],
37
- additional_outputs=[code],
 
 
 
 
 
38
  type="messages"
39
  )
40
  with gr.Column():
41
- gr.Markdown("<center><h1>Code Artifacts</h1></center>")
42
- code.render()
43
 
44
- demo.launch()
 
 
1
import gradio as gr
from huggingface_hub import InferenceClient
from transformers import pipeline

# Hugging Face Inference API client used for free-form text generation.
client = InferenceClient("bigscience/bloom")

# Local Transformers pipeline for extractive question answering.
qa_pipeline = pipeline(
    "question-answering",
    model="distilbert-base-cased-distilled-squad",
)

# Canned code snippets served by the chatbot, keyed by topic then language.
# NOTE: the `elif n == 1` line was missing from the Python snippet, which
# made the displayed code (and the snippet itself) syntactically broken for
# n == 1 — restored here.
code_snippets = {
    "fibonacci": {
        "python": """
def fib(n):
    if n <= 0:
        return 0
    elif n == 1:
        return 1
    else:
        return fib(n-1) + fib(n-2)
""",
        "javascript": """
function fib(n) {
    if (n <= 0) return 0;
    if (n === 1) return 1;
    return fib(n - 1) + fib(n - 2);
}
""",
    }
}
32
 
33
# Chatbot Function
def chatbot(message, history):
    """Route a chat message to a canned code snippet or a Hugging Face model.

    Args:
        message: The user's latest chat message.
        history: Prior chat turns (unused; required by gr.ChatInterface).

    Returns:
        A ``(reply_text, code_or_None)`` tuple; the second element feeds the
        ChatInterface's additional ``gr.Code`` output.
    """
    text = message.lower()
    if "python" in text and "fibonacci" in text:
        return "Here is the Fibonacci code in Python:", gr.Code(
            language="python", value=code_snippets["fibonacci"]["python"]
        )
    elif "javascript" in text and "fibonacci" in text:
        return "Here is the Fibonacci code in JavaScript:", gr.Code(
            language="javascript", value=code_snippets["fibonacci"]["javascript"]
        )
    elif "huggingface" in text:
        # Generate text using the Hugging Face Inference API.
        prompt = "Write a short poem about cybersecurity."
        try:
            # BUGFIX: text_generation has no ``max_length`` kwarg (it is
            # ``max_new_tokens``), and it returns the generated string
            # directly — the old ``response['generated_text']`` raised
            # TypeError on a str.
            response = client.text_generation(prompt, max_new_tokens=50)
        except Exception as err:
            # Network/API failure: report it in chat instead of crashing the UI.
            return f"Hugging Face request failed: {err}", None
        return f"Hugging Face Generated Text: {response}", None
    elif "question" in text:
        # Use the QA pipeline to answer a fixed demo question.
        question = "What is the purpose of cybersecurity?"
        context = "Cybersecurity involves protecting systems, networks, and programs from digital attacks."
        result = qa_pipeline(question=question, context=context)
        return f"Hugging Face QA Answer: {result['answer']}", None
    else:
        return "Please ask about Python/JavaScript code or Hugging Face functionalities.", None
52
 
53
# Gradio Interface: chat on the left, rendered code/model output on the right.
with gr.Blocks() as demo:
    # Created with render=False so it can be wired into the ChatInterface's
    # additional_outputs first, then placed in the right-hand column below.
    code_output = gr.Code(render=False)
    with gr.Row():
        with gr.Column():
            gr.Markdown("<center><h1>Chat About Code or Hugging Face</h1></center>")
            gr.ChatInterface(
                chatbot,
                examples=[
                    "Python Fibonacci",
                    "JavaScript Fibonacci",
                    "HuggingFace: Generate text",
                    "Ask a question about cybersecurity",
                ],
                additional_outputs=[code_output],
                type="messages",
            )
        with gr.Column():
            gr.Markdown("<center><h1>Code or Model Output</h1></center>")
            code_output.render()

# Launch only when executed as a script (idiomatic guard; Hugging Face
# Spaces runs app.py as __main__, so deployed behavior is unchanged).
if __name__ == "__main__":
    demo.launch()