# codegeex2-6b-int4 / save_model.py
from transformers import AutoModel

# Load the CodeGeeX2-6B checkpoint (uses custom modeling code) and move it to the GPU.
model = AutoModel.from_pretrained("/mnt/vepfs/qinkai/release/codegeex2-6b/", trust_remote_code=True).cuda()
# Re-save the weights into the current directory, sharded into files of at most 2000 MB.
model.save_pretrained("./", max_shard_size="2000MB")
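
# Optional sanity check (a minimal sketch, not part of the original script): reload the
# re-sharded checkpoint from this directory to confirm the shards load cleanly. This
# assumes the custom modeling code is available alongside the saved weights and that
# there is enough host memory for a second copy of the model.
reloaded = AutoModel.from_pretrained("./", trust_remote_code=True)
print("Reloaded", type(reloaded).__name__, "from the sharded checkpoint")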