"""Download a model's tokenizer and dump its chat template(s) to .jinja files.

Output goes to a directory named after the model (last path component);
multi-template tokenizers produce one file per template key.
"""
import os

from transformers import AutoTokenizer

# Uncomment exactly one MODEL_PATH. Notes record per-model template quirks.
# MODEL_PATH = "meta-llama/Llama-3.1-405B-Instruct"
# MODEL_PATH = "NousResearch/Hermes-3-Llama-3.1-405B"  # tool_calls not supported in messages
# MODEL_PATH = "../../test/Llama-4-Maverick-17B-128E-Instruct"
# MODEL_PATH = "meta-llama/Llama-4-Maverick-17B-128E-Instruct"
# MODEL_PATH = "Qwen/Qwen3-235B-A22B-Instruct-2507"
# MODEL_PATH = "Qwen/Qwen3-235B-A22B-Thinking-2507"
# MODEL_PATH = "Qwen/Qwen3-235B-A22B"
# MODEL_PATH = "Qwen/Qwen2.5-72B-Instruct"
MODEL_PATH = "Qwen/QwQ-32B"
# MODEL_PATH = "mistralai/Mistral-7B-Instruct-v0.1"  # no tool_calls in messages, no role=tool, no tools
# MODEL_PATH = "mistralai/Ministral-8B-Instruct-2410"  # supports tools and tool_calls (id required); non-mainstream format
# MODEL_PATH = "deepseek-ai/DeepSeek-R1"
# MODEL_PATH = "deepseek-ai/DeepSeek-R1-0528"
# MODEL_PATH = "deepseek-ai/DeepSeek-V3.1"
# MODEL_PATH = "google/gemma-3-27b-it"

tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
chat_template = tokenizer.chat_template

# Name the output directory after the model id's last component.
output_dir = MODEL_PATH.split("/")[-1]
os.makedirs(output_dir, exist_ok=True)

if chat_template is None:
    # Some tokenizers ship no chat template; fail loudly instead of writing "None".
    raise ValueError(f"{MODEL_PATH} has no chat_template")

if isinstance(chat_template, dict):
    # Multi-template tokenizers: one file per template key (e.g. "default", "tool_use").
    for key, template in chat_template.items():
        # encoding is explicit: templates often contain non-ASCII characters.
        with open(f"{output_dir}/chat_template.{key}.jinja", "w", encoding="utf-8") as f_out:
            f_out.write(template)
else:
    # chat_template = chat_template.replace("\\n", "\n")
    with open(f"{output_dir}/chat_template.jinja", "w", encoding="utf-8") as f_out:
        f_out.write(chat_template)