Sebbe33 committed on
Commit 2384651 · verified · 1 Parent(s): 2663d44

Upload app.py

Files changed (1)
app.py +39 -44
app.py CHANGED
@@ -27,64 +27,59 @@ def fetch_arxiv_summary(query, sort_by="relevance", sort_order="descending", max
             summaries.append(f"Title: {title.text.strip()}\n Link: {link}\n Abstract: {summary.text.strip()}")
         return summaries if summaries else ["No Studies found"]
     except Exception as e:
-        return [f"Error retrieving the study: {str(e)}"]
-
-# Chatbot logic with arXiv integration
-def respond(
-    message,
-    history: list[tuple[str, str]],
-    system_message,
-    sort_by,
-    sort_order,
-    max_results,
-    query
-):
-    # Generate the query and fetch the studies
+        return [f"Error: {str(e)}"]
+
+
+# Function to generate the results
+def search_and_generate(query, sort_by, sort_order, max_results, system_message, max_tokens, temperature, top_p):
+    # Fetch the study information
     study_summaries = fetch_arxiv_summary(query, sort_by, sort_order, max_results)
     study_info = "\n".join(study_summaries)
 
-    # Prepare the messages
-    #messages = [{"role": "system", "content": f"{system_message} You are a highly capable assistant specializing in parsing and summarizing study abstracts. Your task is to analyze the provided study data, extract relevant information, and offer concise summaries. Always include the study's title and a direct link, ensuring clarity and accessibility.\n"}]
-    messages = [{"role": "system", "content": f"{system_message} You are a highly capable assistant specializing in parsing and summarizing study abstracts. Your task is to analyze the provided study data, extract relevant information, and offer concise summaries. Always include the study's title and a direct link, ensuring clarity and accessibility. The data will be provided as a list of strings, where each string contains details about a study in the following format: 'Title: [Study Title]\\nLink: [URL]\\nSummary: [Study Abstract]'. Process each entry separately, ensuring accuracy and readability in your summaries.\n"}]
-
-    for val in history:
-        if val[0]:
-            messages.append({"role": "user", "content": val[0]})
-        if val[1]:
-            messages.append({"role": "assistant", "content": val[1]})
-
-    messages.append({"role": "user", "content": f"{message}\nUse this context (Studies):\n{study_info}"})
-
-    # Generate the model's response
+    # Send the request to the model
+    messages = [{"role": "system", "content": system_message},
+                {"role": "user", "content": f"Studies:\n{study_info}"}]
 
     response = ""
     for message in client.chat_completion(
         messages,
-        stream=True
+        max_tokens=max_tokens,
+        stream=True,
+        temperature=temperature,
+        top_p=top_p,
     ):
         token = message.choices[0].delta.content
         response += token
-        yield response
-
-# Gradio interface with additional inputs
+
+    return f"Studies:\n{study_info}\n\nAnswer of the model:\n{response}"
+
+
+# Gradio interface with input and output fields
+def create_intro_text():
+    return ("Enter your specific query in the field below, and the bot will provide you with studies including the title, link, and summary.")
 
 with gr.Blocks() as demo:
-    gr.Markdown("""Helloooooo
-    This chatbot uses AI to answer your questions and retrieve relevant studies from the arXiv database.
-    Enter your specific query in the field below, and the bot will provide you with studies including the title, link, and summary.
-    """)
-
-    chat_interface = gr.ChatInterface(
-        respond,
-        additional_inputs=[
-            gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-            gr.Dropdown(label="Sortby", choices=["relevance", "lastUpdatedDate", "submittedDate"], value="relevance"),
-            gr.Dropdown(label="Sort", choices=["ascending", "descending"], value="descending"),
-            gr.Slider(label="MaxEntries", minimum=1, maximum=50, value=20, step=1),
-            gr.Textbox(value="", label="Query", placeholder="Enter your specific search term.")
-        ],
+    gr.Markdown(create_intro_text())
+    query_input = gr.Textbox(value="", label="Query (arxiv API)", placeholder="Enter your specific search term.")
+    sort_by = gr.Dropdown(label="Sortby", choices=["relevance", "lastUpdatedDate", "submittedDate"], value="relevance")
+    sort_order = gr.Dropdown(label="Sort", choices=["ascending", "descending"], value="descending")
+    max_results = gr.Slider(label="Max Entries", minimum=1, maximum=50, value=20, step=1)
+    system_message = gr.Textbox(value="You are a highly capable assistant specializing in parsing and summarizing study abstracts. Your task is to analyze the provided study data, extract relevant information, and offer concise summaries. Always include the study's title and a direct link, ensuring clarity and accessibility. The data will be provided as a list of strings, where each string contains details about a study in the following format: 'Title: [Study Title]\\nLink: [URL]\\nSummary: [Study Abstract]'. Process each entry separately, ensuring accuracy and readability in your summaries.", label="System message")
+    max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max Token")
+    temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
+    top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
+
+    output_box = gr.Textbox(label="Results", placeholder="Result...", lines=10)
+    search_button = gr.Button("Search")
+
+    search_button.click(
+        fn=search_and_generate,
+        inputs=[query_input, sort_by, sort_order, max_results, system_message, max_tokens, temperature, top_p],
+        outputs=output_box
     )
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
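
Note: the hunk begins partway through fetch_arxiv_summary, so the request and parsing code above line 27 is not shown in this commit. For orientation only, a minimal sketch of how such a helper could be written against the public arXiv Atom API follows; the endpoint, query parameters, and ElementTree parsing are assumptions inferred from the visible tail (title, link, summary), not code taken from this repository.

# Hedged sketch, not the committed code: a possible fetch_arxiv_summary
# built on the public arXiv Atom API (http://export.arxiv.org/api/query).
import urllib.parse
import urllib.request
import xml.etree.ElementTree as ET

ATOM = "{http://www.w3.org/2005/Atom}"

def fetch_arxiv_summary(query, sort_by="relevance", sort_order="descending", max_results=20):
    try:
        params = urllib.parse.urlencode({
            "search_query": f"all:{query}",   # assumed field prefix
            "sortBy": sort_by,                # relevance | lastUpdatedDate | submittedDate
            "sortOrder": sort_order,          # ascending | descending
            "max_results": max_results,
        })
        with urllib.request.urlopen(f"http://export.arxiv.org/api/query?{params}", timeout=30) as resp:
            root = ET.fromstring(resp.read())
        summaries = []
        for entry in root.findall(f"{ATOM}entry"):
            title = entry.find(f"{ATOM}title")
            summary = entry.find(f"{ATOM}summary")
            link = entry.find(f"{ATOM}id").text.strip()
            summaries.append(f"Title: {title.text.strip()}\n Link: {link}\n Abstract: {summary.text.strip()}")
        return summaries if summaries else ["No Studies found"]
    except Exception as e:
        return [f"Error: {str(e)}"]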
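
A small caveat on the streaming loop in search_and_generate: with huggingface_hub's InferenceClient, chat_completion(..., stream=True) can yield chunks whose delta.content is None (typically the final chunk), in which case response += token raises a TypeError. As a hedged suggestion rather than part of this commit, a guard like the following keeps the accumulation safe:

for chunk in client.chat_completion(
    messages,
    max_tokens=max_tokens,
    stream=True,
    temperature=temperature,
    top_p=top_p,
):
    token = chunk.choices[0].delta.content
    # skip None/empty deltas instead of concatenating them
    if token:
        response += token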