import gradio as gr
from duckduckgo_search import DDGS
import requests
import os
from typing import List
from pydantic import BaseModel, Field

# Global variables
# The Hugging Face token is read from the environment (on Spaces, set it as a secret).
huggingface_token = os.environ.get("HUGGINGFACE_TOKEN")
# Function to perform a DuckDuckGo search
def duckduckgo_search(query):
    with DDGS() as ddgs:
        results = ddgs.text(query, max_results=5)
    return results
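# Note: each result returned by DDGS.text() is a dict with 'title', 'href', and
# 'body' keys; get_response_with_search() below relies on these fields.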
class CitingSources(BaseModel):
    sources: List[str] = Field(
        ...,
        description="List of sources to cite. Should be a URL of the source."
    )
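# Note: CitingSources defines a structured schema for citations but is not wired
# into the request or response parsing below. A minimal sketch of how it could be
# used (assuming the model lists one URL per line after "Sources:"):
#   cited = CitingSources(sources=[line.strip() for line in sources.splitlines() if line.strip()])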
def get_response_with_search(query):
    # Perform the web search
    search_results = duckduckgo_search(query)

    # Use the search results as context for the model
    context = "\n".join(f"{result['title']}\n{result['body']}\nSource: {result['href']}\n"
                        for result in search_results if 'body' in result)

    # Prompt formatted for Mistral-7B-Instruct
    prompt = f"""<s>[INST] Using the following context:
{context}
Write a detailed and complete research document that fulfills the following user request: '{query}'
After writing the document, please provide a list of sources used in your response. [/INST]"""
    # API endpoint for Mistral-7B-Instruct-v0.3
    API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"

    # Headers
    headers = {"Authorization": f"Bearer {huggingface_token}"}

    # Payload
    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": 1000,
            "temperature": 0.7,
            "top_p": 0.95,
            "top_k": 40,
            "repetition_penalty": 1.1
        }
    }
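    # The parameters above are standard Hugging Face text-generation settings:
    # max_new_tokens caps the response length, temperature/top_p/top_k control
    # sampling randomness, and repetition_penalty discourages repeated phrases.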
    # Make the API call
    response = requests.post(API_URL, headers=headers, json=payload)

    if response.status_code == 200:
        result = response.json()
        if isinstance(result, list) and len(result) > 0:
            generated_text = result[0].get('generated_text', 'No text generated')

            # Remove the instruction part
            content_start = generated_text.find("[/INST]")
            if content_start != -1:
                generated_text = generated_text[content_start + len("[/INST]"):].strip()

            # Split the response into main content and sources
            parts = generated_text.split("Sources:", 1)
            main_content = parts[0].strip()
            sources = parts[1].strip() if len(parts) > 1 else ""

            return main_content, sources
        else:
            return f"Unexpected response format: {result}", ""
    else:
        return f"Error: API returned status code {response.status_code}", ""
def gradio_interface(query):
    main_content, sources = get_response_with_search(query)
    formatted_response = f"{main_content}\n\nSources:\n{sources}"
    return formatted_response
# Gradio interface
iface = gr.Interface(
    fn=gradio_interface,
    inputs=gr.Textbox(lines=2, placeholder="Enter your question here..."),
    outputs="text",
    title="AI-powered Web Search Assistant",
    description="Ask a question, and I'll search the web and provide an answer using the Mistral-7B-Instruct model.",
    examples=[
        ["Latest news about Yann LeCun"],
        ["Latest news site:github.blog"],
        ["Where can I find the best hotel in Galapagos, Ecuador intitle:hotel"],
        ["filetype:pdf intitle:python"]
    ]
)
if __name__ == "__main__":
    iface.launch()
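# Local run sketch (an assumed workflow, not part of the original Space config):
#   export HUGGINGFACE_TOKEN=<your token>
#   pip install gradio duckduckgo_search requests pydantic
#   python app.py   # assuming this file is saved as app.py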