from typing import Union

from langchain_core.documents import Document
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage


def fake_token_counter(messages: Union[list[BaseMessage], BaseMessage]) -> int:
    """Approximate a token count as the number of whitespace-separated words.

    Accepts either a single message or a list of messages; for a list the
    counts of all message contents are summed.

    Args:
        messages: One ``BaseMessage`` or a list of them. Assumes each
            ``message.content`` is a plain string -- TODO confirm callers
            never pass structured (list-of-blocks) content.

    Returns:
        The total word count across the given message(s).
    """
    if isinstance(messages, list):
        return sum(len(message.content.split()) for message in messages)
    return len(messages.content.split())


def convert_list_context_source_to_str(contexts: list[Document]) -> str:
    """Format retrieved documents into one indexed, separator-delimited string.

    Args:
        contexts: Retrieved ``Document`` objects, in ranking order.

    Returns:
        A single string with one "Document index i / Content" section per
        document, each followed by a dashed separator line. Empty input
        yields an empty string.
    """
    # "".join avoids the quadratic cost of repeated += string concatenation;
    # the emitted text is byte-identical to the original loop's output.
    separator = "----------------------------------------------\n\n"
    return "".join(
        f"Document index {i}:\nContent: {context.page_content}\n{separator}"
        for i, context in enumerate(contexts)
    )


def convert_message(messages: list[dict]) -> list[BaseMessage]:
    """Convert serialized chat messages into LangChain message objects.

    Args:
        messages: Dicts with at least ``"type"`` and ``"content"`` keys.
            A ``"type"`` of ``"human"`` maps to ``HumanMessage``; any other
            value is treated as an AI turn and maps to ``AIMessage``.

    Returns:
        The converted messages, in the original order.
    """
    converted: list[BaseMessage] = []
    for message in messages:
        if message["type"] == "human":
            converted.append(HumanMessage(content=message["content"]))
        else:
            converted.append(AIMessage(content=message["content"]))
    return converted