NeoZ123 committed (verified)
Commit bae9757 · 1 Parent(s): dd80640

Update README.md

Files changed (1)
  1. README.md +2 -2
README.md CHANGED
@@ -27,7 +27,7 @@ A simple demo for deployment of the model:
 import torch
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
-model_path = "THUDM/LongReward-llama3.1-8b-SFT"
+model_path = "NeoZ123/LongReward-llama3.1-8b-SFT"
 tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
 model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.bfloat16, trust_remote_code=True, device_map='auto')
 context = '''
@@ -44,7 +44,7 @@ You can also deploy the model with [vllm](https://github.com/vllm-project/vllm)
 import torch
 from vllm import LLM, SamplingParams
 
-model_path = "THUDM/LongReward-llama3.1-8b-SFT"
+model_path = "NeoZ123/LongReward-llama3.1-8b-SFT"
 model = LLM(
     model= model_path,
     dtype=torch.bfloat16,
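
For readers trying out the renamed repository, here is a minimal sketch of running the transformers demo end to end with the new `model_path`. The `context`, `query`, generation settings, and use of `apply_chat_template` are illustrative assumptions, not part of the README shown above.

```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Repository id as updated by this commit
model_path = "NeoZ123/LongReward-llama3.1-8b-SFT"
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_path, torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto"
)

# Placeholder long-context input; the README's actual `context` is truncated in this diff
context = "..."
query = "Please summarize the document above."
messages = [{"role": "user", "content": context + "\n\n" + query}]

# Assumes the checkpoint ships a Llama-3.1-style chat template
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

with torch.no_grad():
    output = model.generate(input_ids, max_new_tokens=512, do_sample=False)
print(tokenizer.decode(output[0][input_ids.shape[1]:], skip_special_tokens=True))
```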
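
Similarly, a sketch of completing the vllm snippet with the renamed repository; the prompt, sampling parameters, and `trust_remote_code` flag are assumptions added for illustration.

```python
import torch
from transformers import AutoTokenizer
from vllm import LLM, SamplingParams

model_path = "NeoZ123/LongReward-llama3.1-8b-SFT"
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)

# Same construction as in the README, with trust_remote_code assumed for parity
model = LLM(
    model=model_path,
    dtype=torch.bfloat16,
    trust_remote_code=True,
)

# Render an illustrative prompt with the chat template, then decode greedily
prompt = tokenizer.apply_chat_template(
    [{"role": "user", "content": "Give a one-sentence description of LongReward."}],
    add_generation_prompt=True,
    tokenize=False,
)
sampling_params = SamplingParams(temperature=0.0, max_tokens=256)
outputs = model.generate([prompt], sampling_params)
print(outputs[0].outputs[0].text)
```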