Upload folder using huggingface_hub
- Test_RAG.py +2 -0
Test_RAG.py
CHANGED
@@ -306,6 +306,8 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
 tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-3.2-3B-Instruct')
 if tokenizer.pad_token_id is None:
     tokenizer.pad_token_id = tokenizer.eos_token_id
+# Also make sure the tokenizer used by the HuggingFacePipeline has its pad_token_id set
+llm.pipeline.tokenizer.pad_token_id = tokenizer.pad_token_id
 llm.invoke("2 + 2 =")
 import re
 from typing import List
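For context, here is a minimal, illustrative sketch of how llm is presumably wired up, assuming it is a langchain_huggingface HuggingFacePipeline wrapping a transformers text-generation pipeline; the model loading options, max_new_tokens value, and pipeline construction details are assumptions, not the exact contents of Test_RAG.py:

# Illustrative sketch only, assuming `llm` is a HuggingFacePipeline around a
# transformers text-generation pipeline (as the imports in the hunk suggest).
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from langchain_huggingface import HuggingFacePipeline

model_id = 'meta-llama/Llama-3.2-3B-Instruct'  # gated model; requires HF access

tokenizer = AutoTokenizer.from_pretrained(model_id)
if tokenizer.pad_token_id is None:
    # Llama tokenizers ship without a pad token, so reuse EOS for padding.
    tokenizer.pad_token_id = tokenizer.eos_token_id

model = AutoModelForCausalLM.from_pretrained(model_id, device_map='auto')  # needs accelerate

# Passing the model id as `tokenizer` makes the pipeline load its own tokenizer
# instance, so the pad_token_id fix above does not automatically reach it.
generate = pipeline('text-generation', model=model, tokenizer=model_id, max_new_tokens=64)
llm = HuggingFacePipeline(pipeline=generate)

# This is what the two added lines in the diff do: mirror the pad_token_id onto
# the tokenizer actually used by the wrapped pipeline, so generation does not
# fail for lack of a padding token.
llm.pipeline.tokenizer.pad_token_id = tokenizer.pad_token_id

llm.invoke("2 + 2 =")

If the pipeline were built with the very same tokenizer object, the mirrored assignment would be a harmless no-op; it matters when the wrapped pipeline holds its own tokenizer instance, which is why the patch sets pad_token_id in both places before calling llm.invoke.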