Upload folder using huggingface_hub
Test_RAG.py +5 -1
Test_RAG.py CHANGED
@@ -301,7 +301,11 @@ llm = HuggingFacePipeline.from_model_id(
     },
     pipeline_kwargs={"max_new_tokens": 2},
 )
-
+# Set pad_token_id to eos_token_id
+from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
+tokenizer = AutoTokenizer.from_pretrained(llm)
+if tokenizer.pad_token_id is None:
+    tokenizer.pad_token_id = llm.config.eos_token_id
 llm.invoke("2 + 2 =")
 import re
 from typing import List
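The added lines set pad_token_id to eos_token_id so that generation does not warn about a missing padding token. Note that AutoTokenizer.from_pretrained expects a model id or local path rather than the HuggingFacePipeline wrapper, so a variant that reuses the tokenizer and model already loaded by from_model_id avoids a second load. A minimal sketch, assuming llm is the wrapper built above and that it exposes the underlying transformers pipeline as llm.pipeline (an assumption about the installed langchain-huggingface version):

# Sketch: reuse the tokenizer and model the wrapper already holds.
# `llm.pipeline` is assumed to be the underlying transformers pipeline.
hf_pipe = llm.pipeline
tokenizer = hf_pipe.tokenizer
model = hf_pipe.model

if tokenizer.pad_token_id is None:
    # Fall back to the end-of-sequence token for padding.
    tokenizer.pad_token_id = model.config.eos_token_id
    model.config.pad_token_id = model.config.eos_token_id

llm.invoke("2 + 2 =")

This keeps the padding id consistent between the tokenizer and the model config without reloading anything from the Hub.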