Update README.md
Browse files
README.md
CHANGED
@@ -56,8 +56,8 @@ Mamba2Config(
 ```python
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
-tokenizer = AutoTokenizer.from_pretrained("loim/whiff-20M")
-model = AutoModelForCausalLM.from_pretrained("loim/whiff-20M")
+tokenizer = AutoTokenizer.from_pretrained("loim/whiff-mamba2-20M")
+model = AutoModelForCausalLM.from_pretrained("loim/whiff-mamba2-20M")
 
 def chat(messages, temp=0.5):
     inputs = tokenizer.apply_chat_template(messages, return_tensors="pt")
@@ -154,8 +154,8 @@ Mamba2Config(
 ```python
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
-tokenizer = AutoTokenizer.from_pretrained("loim/whiff-20M")
-model = AutoModelForCausalLM.from_pretrained("loim/whiff-20M")
+tokenizer = AutoTokenizer.from_pretrained("loim/whiff-mamba2-20M")
+model = AutoModelForCausalLM.from_pretrained("loim/whiff-mamba2-20M")
 
 def chat(messages, temp=0.5):
     inputs = tokenizer.apply_chat_template(messages, return_tensors="pt")