"""Demo: render a DeepSeek-V3.1 chat prompt with thinking mode enabled.

Context: DeepSeek V3.1 responses were mis-parsed and ended up entirely in
the ``reasoning_content`` field.
- Issue: https://github.com/vllm-project/vllm/issues/23429
- Fix:   https://github.com/vllm-project/vllm/pull/23437

This script prints the exact prompt string produced by the model's chat
template so the template output can be inspected directly.
"""

from transformers import AutoTokenizer

MODEL_PATH = "deepseek-ai/DeepSeek-V3.1"


def main() -> None:
    """Load the DeepSeek-V3.1 tokenizer and print the rendered chat prompt.

    Side effects: downloads tokenizer files from the Hugging Face Hub on
    first run, then writes the rendered prompt to stdout.
    """
    tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)

    # Minimal multi-turn conversation used to exercise the template.
    messages = [
        {"role": "system", "content": "You are a bot that responds to weather queries."},
        {"role": "user", "content": "Hey, what's the temperature in Paris right now?"},
        {"role": "assistant", "content": "2"},
        {"role": "user", "content": "3"},
    ]

    # tokenize=False returns the raw prompt string; thinking=True selects the
    # "thinking" branch of the chat template, which is the path under
    # investigation in the linked issue.
    prompt = tokenizer.apply_chat_template(
        conversation=messages,
        tokenize=False,
        thinking=True,
        add_generation_prompt=True,
    )
    print(prompt)


if __name__ == "__main__":
    main()