File size: 700 Bytes
fc213de
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
# list_modules.py
"""List module names of a pretrained causal LM that look like projection or
attention layers — useful for choosing LoRA ``target_modules``."""
from transformers import AutoModelForCausalLM

# Path to the local model checkpoint (change to your own model path).
model_path = "/home/yq238/project_pi_aaa247/yq238/qwen_training/models/Qwen-7B-Chat"

model = AutoModelForCausalLM.from_pretrained(
    model_path,
    device_map="auto",          # spread layers across available devices
    trust_remote_code=True,     # Qwen ships custom modeling code
    torch_dtype="auto"
)

# Walk the module tree once and keep only the names; the module objects
# themselves are not needed for either filter below.
module_names = [name for name, _ in model.named_modules()]

print("🔍 模型中包含 'proj' 的模块名:")
for name in module_names:
    if 'proj' in name.lower():
        print(name)

print("\n🔍 模型中包含 'attn' 的模块名(可能包含注意力层):")
for name in module_names:
    # NOTE(review): Qwen-7B's attention uses fused 'c_attn'/'c_proj' layers,
    # so this 'q_'/'k_'/'v_'/'o_' filter may print nothing for it — confirm
    # against the first listing above.
    if 'attn' in name.lower() and any(x in name for x in ['q_', 'k_', 'v_', 'o_']):
        print(name)