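"""Gradio chat front end for a tobacco information assistant.

Reads the OpenAI API key from the environment, routes user questions through
``full_chain.get_response`` (cross-encoder reranking), and returns the answer
together with hyperlinked sources.
"""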
import openai
import gradio as gr
from full_chain import get_response
import os
import logging

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('app.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)

# Initialize OpenAI client
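# Note: `client` is not referenced again in this module; creating it here mainly
# serves as an early check that the API key is present and well-formed.
# full_chain.get_response is assumed to pick up the key from the environment itself.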
try:
    api_key = os.getenv("OPENAI_API_KEY")
    if not api_key:
        raise ValueError("OPENAI_API_KEY environment variable not set")
    client = openai.OpenAI(api_key=api_key)
    logger.info("OpenAI client initialized successfully")
except Exception as e:
    logger.error(f"Failed to initialize OpenAI client: {str(e)}")
    raise

def create_hyperlink(url, title, domain):
    """Create HTML hyperlink with domain information."""
    return f"<a href='{url}'>{title}</a> ({domain})"

def predict(message, history):
    """Process user message and return response with source links."""
    try:
        logger.info(f"Processing new query: {message}")
        
        # Get the answer text plus source links, titles, and domains from the retrieval chain
        responder, links, titles, domains = get_response(message, rerank_type="crossencoder")
        logger.info(f"Received response with {len(links)} sources")
        
        # Create hyperlinks for sources
        formatted_links = [create_hyperlink(link, title, domain) 
                         for link, title, domain in zip(links, titles, domains)]
        
        # Combine response with sources
        out = responder + "\n" + "\n".join(formatted_links)
        
        logger.info("Response generated successfully")
        return out

    except Exception as e:
        error_msg = f"Error processing query: {str(e)}"
        logger.error(error_msg)
        return f"An error occurred while processing your request: {str(e)}"

# Define example queries
EXAMPLE_QUERIES = [
    "How many Americans Smoke?",
    "What are some measures taken by the Indian Government to reduce the smoking population?",
    "Does smoking negatively affect my health?"
]

# Initialize and launch Gradio interface
def main():
    try:
        interface = gr.ChatInterface(
            predict,
            examples=EXAMPLE_QUERIES,
            title="Tobacco Information Assistant",
            description="Ask questions about tobacco-related topics and get answers with reliable sources."
        )
        logger.info("Starting Gradio interface")
        interface.launch()
    except Exception as e:
        logger.error(f"Failed to launch Gradio interface: {str(e)}")
        raise

if __name__ == "__main__":
    main()