|
|
|
|
|
|
|
from transformers import AutoTokenizer, AutoModel |
|
import torch |
|
import onnx |
|
|
|
# Export the Russian SBERT encoder (ai-forever/sbert_large_nlu_ru) to ONNX
# with dynamic batch/sequence axes so the graph accepts arbitrary input shapes.
tokenizer = AutoTokenizer.from_pretrained("ai-forever/sbert_large_nlu_ru")
model = AutoModel.from_pretrained("ai-forever/sbert_large_nlu_ru")

# Be explicit about inference mode: torch.onnx.export defaults to EVAL,
# but stating it here makes the intent unambiguous (dropout off, etc.).
model.eval()

# Minimal dummy batch used only for tracing; the dynamic_axes mapping below
# is what makes the exported graph shape-flexible at runtime.
dummy_input = tokenizer("Тест", return_tensors="pt", padding=True, truncation=True)

onnx_path = "sbert_large_nlu_ru.onnx"

# No gradients are needed for tracing/export; no_grad avoids building autograd state.
with torch.no_grad():
    torch.onnx.export(
        model,
        # Positional args match BertModel.forward(input_ids, attention_mask, ...).
        (dummy_input["input_ids"], dummy_input["attention_mask"]),
        onnx_path,
        input_names=["input_ids", "attention_mask"],
        output_names=["last_hidden_state"],
        dynamic_axes={
            "input_ids": {0: "batch", 1: "sequence"},
            "attention_mask": {0: "batch", 1: "sequence"},
            "last_hidden_state": {0: "batch", 1: "sequence", 2: "features"},
        },
        # Pin the opset so the export is reproducible across torch versions.
        opset_version=14,
    )

# Sanity-check the exported graph; raises onnx.checker.ValidationError on a
# malformed model. (This is what the file-level `import onnx` is for.)
onnx.checker.check_model(onnx.load(onnx_path))
|
|
|
|
|
|