Upload folder using huggingface_hub

Test_RAG.py CHANGED (+6 -6)
@@ -303,12 +303,12 @@ llm = HuggingFacePipeline.from_model_id(
 )
 # Set pad_token_id to eos_token_id
 from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
-tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-3.2-3B-Instruct')
-if tokenizer.pad_token_id is None:
-    tokenizer.pad_token_id = tokenizer.eos_token_id
-# Also make sure the tokenizer used by HuggingFacePipeline has pad_token_id set
-llm.pipeline.tokenizer.pad_token_id = tokenizer.pad_token_id
-llm.invoke("2 + 2 =")
+# tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-3.2-3B-Instruct')
+# if tokenizer.pad_token_id is None:
+#     tokenizer.pad_token_id = tokenizer.eos_token_id
+# # Also make sure the tokenizer used by HuggingFacePipeline has pad_token_id set
+# llm.pipeline.tokenizer.pad_token_id = tokenizer.pad_token_id
+# llm.invoke("2 + 2 =")
 import re
 from typing import List
 from langchain.text_splitter import (
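For context, the lines this commit comments out implement the usual Llama pad-token workaround: Llama tokenizers ship without a pad token, so pad_token_id is set to eos_token_id and the fix is propagated to the tokenizer inside the HuggingFacePipeline. Below is a minimal self-contained sketch of that logic, assuming the langchain_huggingface package and a "text-generation" task for from_model_id; neither appears in this hunk, so both are assumptions rather than the file's actual setup.

from transformers import AutoTokenizer
from langchain_huggingface import HuggingFacePipeline  # assumed import; the hunk only shows the call site

# Build the pipeline roughly as the hunk header suggests (task is an assumption).
llm = HuggingFacePipeline.from_model_id(
    model_id="meta-llama/Llama-3.2-3B-Instruct",
    task="text-generation",
)

# Llama tokenizers define no pad token; fall back to EOS so padded generation
# does not warn or fail.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-3.2-3B-Instruct")
if tokenizer.pad_token_id is None:
    tokenizer.pad_token_id = tokenizer.eos_token_id

# Propagate the fix to the tokenizer the pipeline actually uses.
llm.pipeline.tokenizer.pad_token_id = tokenizer.pad_token_id

print(llm.invoke("2 + 2 ="))  # quick smoke test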