import json
import gradio as gr
from utils.logging_util import logger
from models.cpp_qwen2 import bot
# from models.hf_qwen2 import bot
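
# Gradio callbacks for the chat demo. `history` is an OpenAI-style message list
# ({"role": ..., "content": ...} dicts starting with a system message); `chatbot`
# holds the (query, response) display tuples rendered by gr.Chatbot.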


# Legacy hook, kept commented out for reference: it monkey-patched
# gr.Chatbot.postprocess to render chat messages as HTML via mdtex2html.
# def postprocess(self, y):
#     if y is None:
#         return []
#     for i, (message, response) in enumerate(y):
#         y[i] = (
#             None if message is None else mdtex2html.convert((message)),
#             None if response is None else mdtex2html.convert(response),
#         )
#     return y
#
# gr.Chatbot.postprocess = postprocess


def generate_query(chatbot, history):
    """Generate the next user-side query (auto mode) and stream it into the chatbot."""
    if history and history[-1]["role"] == "user":
        gr.Warning('You should generate the assistant response first.')
        yield None, chatbot, history
    else:
        chatbot.append((None, None))  # placeholder row that the streamed query fills in
        query = ""  # guard against an empty stream
        streamer = bot.generate_query(history, stream=True)
        for query in streamer:
            chatbot[-1] = (query, None)
            yield query, chatbot, history
        history.append({"role": "user", "content": query})
        yield query, chatbot, history


def generate_response(chatbot, history, user_input=None):
    """
    Generate the assistant response for the latest user query and stream it into the chatbot.

    Auto mode: user_input is None and the query is the last user message in history.
    Manual mode: user_input is the text the user typed; it is appended to history first.
    :param chatbot: list of (query, response) display tuples
    :param history: list of {"role": ..., "content": ...} messages
    :param user_input: optional manually typed query
    :return: yields (response, chatbot, history)
    """
    if user_input and history[-1]["role"] != "user":
        # Manual mode: record the typed query in both history and the chatbot display.
        history.append({"role": "user", "content": user_input})
        chatbot.append((user_input, None))

    if history[-1]["role"] != "user":
        gr.Warning('You should generate or type the user input first.')
        yield None, chatbot, history
    else:
        query = history[-1]["content"]
        response = ""  # guard against an empty stream
        streamer = bot.generate_response(history, stream=True)
        for response in streamer:
            chatbot[-1] = (query, response)
            yield response, chatbot, history

        history.append({"role": "assistant", "content": response})
        logger.info(f"chatbot is {chatbot}")
        logger.info(f"history is {history}")
        yield response, chatbot, history


def generate(chatbot, history):
    """Dispatch to query or response generation depending on who spoke last."""
    logger.info(f"chatbot: {chatbot}; history: {history}")
    if history[-1]["role"] in ["assistant", "system"]:
        streamer = generate_query(chatbot, history)
    elif history[-1]["role"] == "user":
        streamer = generate_response(chatbot, history)
    else:
        gr.Warning(f"Unexpected role: {history[-1]['role']}")
        return

    yield from streamer


def undo_generate(chatbot, history):
    """Remove the last generated message so it can be regenerated."""
    if history[-1]["role"] == "user":
        # Drop the pending query together with its chatbot row.
        history = history[:-1]
        chatbot = chatbot[:-1]
    elif history[-1]["role"] == "assistant":
        # Drop only the response; keep the query visible in the chatbot.
        history = history[:-1]
        chatbot[-1] = (chatbot[-1][0], None)
    logger.info(f"after undo, {json.dumps(chatbot, ensure_ascii=False)}, {json.dumps(history, ensure_ascii=False)}")
    return "", chatbot, history


def reset_user_input():
    """Clear the user-input textbox."""
    return gr.update(value='')


def reset_state(system):
    """Clear the chatbot and restart the history with the given system prompt."""
    return [], [{"role": "system", "content": system}]


# The setters below write directly into the shared bot.generation_kwargs,
# so new values apply to subsequent generation calls.
def set_max_tokens(max_tokens):
    bot.generation_kwargs["max_tokens"] = max_tokens


def set_top_p(top_p):
    bot.generation_kwargs["top_p"] = top_p


def set_temperature(temperature):
    bot.generation_kwargs["temperature"] = temperature
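

# ---------------------------------------------------------------------------------
# Minimal wiring sketch (an assumption, not the project's actual app code): it shows
# how the callbacks above could be attached to a gr.Blocks UI. All component names
# and default values below are hypothetical, and gr.Chatbot is used in the
# (query, response) tuple format that the callbacks expect.
if __name__ == "__main__":
    default_system = "You are a helpful assistant."
    with gr.Blocks() as demo:
        system = gr.Textbox(value=default_system, label="system prompt")
        chatbot_ui = gr.Chatbot(label="chat")
        history_state = gr.State([{"role": "system", "content": default_system}])
        user_input = gr.Textbox(label="user input")
        last_output = gr.Textbox(label="last output")

        generate_btn = gr.Button("Generate")
        undo_btn = gr.Button("Undo")
        clear_btn = gr.Button("Clear")

        max_tokens = gr.Slider(1, 2048, value=512, step=1, label="max_tokens")
        top_p = gr.Slider(0.0, 1.0, value=0.9, step=0.05, label="top_p")
        temperature = gr.Slider(0.0, 2.0, value=0.7, step=0.05, label="temperature")

        # Auto mode: one click generates either the next query or the next response.
        generate_btn.click(generate, [chatbot_ui, history_state],
                           [last_output, chatbot_ui, history_state])
        # Manual mode: submit typed input, then clear the textbox.
        submit_event = user_input.submit(
            generate_response, [chatbot_ui, history_state, user_input],
            [last_output, chatbot_ui, history_state])
        submit_event.then(reset_user_input, None, [user_input])
        undo_btn.click(undo_generate, [chatbot_ui, history_state],
                       [last_output, chatbot_ui, history_state])
        clear_btn.click(reset_state, [system], [chatbot_ui, history_state])

        # Sampling controls write into bot.generation_kwargs via the setters above.
        max_tokens.change(set_max_tokens, [max_tokens], None)
        top_p.change(set_top_p, [top_p], None)
        temperature.change(set_temperature, [temperature], None)

    # Streaming (generator) callbacks require the queue.
    demo.queue().launch()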