Upload folder using huggingface_hub
README.md CHANGED
```diff
@@ -91,7 +91,7 @@ mgen test --model SequenceClassification --model.backbone aido_ragprotein_16b --
 ```python
 import torch
 from modelgenerator.tasks import Embed
-model = Embed.from_config({"model.backbone": "
+model = Embed.from_config({"model.backbone": "aido_ragprotein_16b"}).eval()
 model.backbone.max_length = 12800
 data = torch.load("ModelGenerator/experiments/AIDO.RAGPLM/examples.pt", 'cpu')[0]
 transformed_batch = model.transform(data)
```
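The updated line pins the backbone to `aido_ragprotein_16b` and puts the task in eval mode. The hunk stops at the transform step, but the next hunk's context line (`print(embedding.shape)`) indicates the example continues with a forward pass. A minimal continuation sketch, assuming the `Embed` task object is callable on the transformed batch; anything not shown in the hunk is an assumption:

```python
# Continuation sketch (not part of the diff): run the model on the
# collated batch and inspect the embedding tensor.
with torch.no_grad():
    embedding = model(transformed_batch)  # assumed callable, per the Embed task usage above
print(embedding.shape)
```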
```diff
@@ -106,7 +106,7 @@ print(embedding.shape)
 ```python
 import torch
 from modelgenerator.tasks import SequenceClassification
-model = SequenceClassification.from_config({"model.backbone": "
+model = SequenceClassification.from_config({"model.backbone": "aido_ragprotein_16b", "model.n_classes": 2}).eval()
 model.backbone.max_length = 12800
 data = torch.load("ModelGenerator/experiments/AIDO.RAGPLM/examples.pt", 'cpu')[0]
 transformed_batch = model.transform(data)
```
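Here the changed line spells out the full config, adding `model.n_classes: 2` and `.eval()`. The later hunk context (`print(torch.argmax(logits, dim=-1))`) suggests the example finishes by taking the argmax over class logits; a hedged sketch of that continuation:

```python
# Continuation sketch (assumed): sequence-level logits, then the predicted class.
with torch.no_grad():
    logits = model(transformed_batch)
print(logits)
print(torch.argmax(logits, dim=-1))  # index into the 2 classes configured above
```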
```diff
@@ -122,7 +122,7 @@ print(torch.argmax(logits, dim=-1))
 ```python
 import torch
 from modelgenerator.tasks import TokenClassification
-model = TokenClassification.from_config({"model.backbone": "
+model = TokenClassification.from_config({"model.backbone": "aido_ragprotein_16b", "model.n_classes": 3}).eval()
 model.backbone.max_length = 12800
 data = torch.load("ModelGenerator/experiments/AIDO.RAGPLM/examples.pt", 'cpu')[0]
 transformed_batch = model.transform(data)
```
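Same pattern for token-level prediction, now with three classes. A hedged continuation sketch, assuming the `TokenClassification` task returns per-token logits for the transformed batch:

```python
# Continuation sketch (assumed): token-level logits, one prediction per residue.
with torch.no_grad():
    logits = model(transformed_batch)
print(torch.argmax(logits, dim=-1))  # per-token class indices over the 3 classes
```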
```diff
@@ -137,7 +137,7 @@ print(torch.argmax(logits, dim=-1))
 
 ```python
 from modelgenerator.tasks import SequenceRegression
-model = SequenceRegression.from_config({"model.backbone": "
+model = SequenceRegression.from_config({"model.backbone": "aido_ragprotein_16b"}).eval()
 model.backbone.max_length = 12800
 data = torch.load("ModelGenerator/experiments/AIDO.RAGPLM/examples.pt", 'cpu')[0]
 transformed_batch = model.transform(data)
```
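The regression example is updated the same way. Note the visible context starts at `from modelgenerator.tasks import SequenceRegression` yet calls `torch.load`, so `torch` is presumably imported earlier in the README. A hedged continuation sketch:

```python
# Continuation sketch (assumed): forward pass to get the regression output.
import torch  # needed here for no_grad; not shown in the hunk context

with torch.no_grad():
    prediction = model(transformed_batch)
print(prediction)
```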