import gradio as gr
from huggingface_hub import InferenceClient
import urllib.request
import urllib.parse
import xml.etree.ElementTree as ET

# HuggingFace Inference Client
#client = InferenceClient("meta-llama/Llama-3.3-70B-Instruct")
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")



# Fetch relevant studies from the arXiv API and return title, link, and abstract for each match
def fetch_arxiv_summary(query, sort_by="relevance", sort_order="descending", max_results=20):
    url = (f'http://export.arxiv.org/api/query?search_query=all:{urllib.parse.quote(query)}'
           f'&start=0&max_results={max_results}&sortBy={sort_by}&sortOrder={sort_order}')
    try:
        data = urllib.request.urlopen(url)
        xml_data = data.read().decode("utf-8")
        root = ET.fromstring(xml_data)
        ns = "{http://www.w3.org/2005/Atom}"
        summaries = []
        for entry in root.findall(f".//{ns}entry"):
            title = entry.find(f"{ns}title")
            link_element = entry.find(f"{ns}link[@rel='alternate']")
            summary = entry.find(f"{ns}summary")
            link = link_element.attrib.get("href") if link_element is not None else "No link available"
            if summary is not None and title is not None:
                summaries.append(f"Title: {title.text.strip()}\nLink: {link}\nSummary: {summary.text.strip()}")
        return summaries if summaries else ["No relevant studies found."]
    except Exception as e:
        return [f"Error while fetching studies: {str(e)}"]

# Chatbot logic with arXiv integration
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
    sort_by,
    sort_order,
    max_results,
):
    # Build the search query and fetch matching studies from arXiv
    # (the user's message is used directly as the query string)
    query = message
    study_summaries = fetch_arxiv_summary(query, sort_by, sort_order, max_results)
    study_info = "\n".join(study_summaries)

    # Prepare the message list, starting with the system prompt
    messages = [{"role": "system", "content": f"{system_message} You are a highly capable assistant specializing in parsing and summarizing study abstracts. Your task is to analyze the provided study data, extract relevant information, and offer concise summaries. Always include the study's title and a direct link, ensuring clarity and accessibility.\n"}]
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})

    messages.append({"role": "user", "content": f"{message}\nUse this Kontext:\n{study_info}"})

    # Stream the model's response token by token
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # the final streamed chunk may carry no content
            response += token
            yield response

# Gradio interface with additional inputs

with gr.Blocks() as demo:
    gr.Markdown("""
### arXiv Study Chatbot

This chatbot uses AI to answer your questions and retrieve relevant studies from the arXiv database.
Enter your query below, and the bot will respond with matching studies, including each title, link, and summary.
""")
    query_input = gr.Textbox(value="", label="Query", placeholder="Enter your specific search term.")
    chat_interface = gr.ChatInterface(
        respond,
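        # These components are passed to respond, in order, after (message, history)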
        additional_inputs=[
            gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
            gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
            gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
            gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
            gr.Dropdown(label="Sortieren nach", choices=["relevance", "lastUpdatedDate", "submittedDate"], value="relevance"),
            gr.Dropdown(label="Sortierreihenfolge", choices=["ascending", "descending"], value="descending"),
            gr.Slider(label="Maximale Ergebnisse", minimum=1, maximum=50, value=20, step=1),
        ],
    )

if __name__ == "__main__":
    demo.launch()