"""Build the QA and summarization chains by piping prompts into the shared LLM."""

import logging

from llms import llm
from prompts import QA_PROMPT, SUMMARIZATION_PROMPT

logger = logging.getLogger(__name__)


def _build_chain(prompt, label):
    """Compose *prompt* with the shared ``llm`` and return the resulting chain.

    Progress is reported both to the module logger and to stdout (the
    ``print`` calls mirror the original console feedback). On failure the
    error is logged with its traceback, echoed to stdout, and re-raised.

    Args:
        prompt: A prompt object supporting the ``|`` composition operator.
        label: Human-readable chain name used in log/console messages
            (e.g. ``"QA chain"``).

    Returns:
        The composed ``prompt | llm`` chain.

    Raises:
        Exception: Whatever the composition raises, re-raised unchanged.
    """
    logger.info("Creating %s", label)
    print(f"> Creating {label}")
    try:
        chain = prompt | llm  # LCEL-style composition: prompt piped into the model
    except Exception as e:
        msg = f"Error: {e}"
        logger.exception(msg)
        print(msg)
        raise  # bare raise preserves the original traceback
    # Capitalize the first letter so messages match the originals exactly
    # ("QA chain created", "Summarization chain created").
    done = f"{label[:1].upper()}{label[1:]} created"
    logger.info("%s", done)
    print(f"> {done}")
    return chain


def get_qa_chain():
    """Return the question-answering chain (``QA_PROMPT | llm``)."""
    return _build_chain(QA_PROMPT, "QA chain")


def get_summarization_chain():
    """Return the summarization chain (``SUMMARIZATION_PROMPT | llm``)."""
    return _build_chain(SUMMARIZATION_PROMPT, "summarization chain")


# Module-level singletons built at import time — unchanged public API;
# other modules import `qa_chain` / `summarization_chain` directly.
qa_chain = get_qa_chain()
summarization_chain = get_summarization_chain()