binglinchengxia committed (verified)
Commit 8fc2014 · Parent: 007a63a

Update README.md

Files changed (1): README.md (+3, -3)
README.md CHANGED
@@ -31,7 +31,7 @@ KwaiCoder-23BA4-v1 is the latest open-source self-developed code completion model
 ```python
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
-model_id = "Kwaipilot/KwaiCoder-DS-V2-Lite-Base"
+model_id = "Kwaipilot/KwaiCoder-23B-A4B-v1"
 tokenizer = AutoTokenizer.from_pretrained(model_id,trust_remote_code=True)
 model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", torch_dtype=torch.bfloat16,trust_remote_code=True)
 text = "#write a quick sort algorithm"
@@ -43,7 +43,7 @@ print(tokenizer.decode(outputs[0], skip_special_tokens=True)[len(text):])
 ```python
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
-model_id = "Kwaipilot/KwaiCoder-DS-V2-Lite-Base"
+model_id = "Kwaipilot/KwaiCoder-23B-A4B-v1"
 tokenizer = AutoTokenizer.from_pretrained(model_id,trust_remote_code=True)
 model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", torch_dtype=torch.bfloat16,trust_remote_code=True)
 text = """<|fim▁begin|>def find_longest_substring(s):
@@ -62,7 +62,7 @@ print(tokenizer.decode(outputs[0], skip_special_tokens=True)[len(text):])
 ```
 
 ## 3.License
-This code repository is licensed under the MIT License. The use of KwaiCoder-DS-V2-Lite-Base models is subject to the Model License.
+This code repository is licensed under the MIT License.
 
 ## 4.BibTex
 ```BibTex
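
For readers skimming the diff: only the model_id line changes, but the surrounding usage snippet is split across hunks. Below is a minimal end-to-end sketch of the updated code-completion example. The loading and decode lines come from the diff context; the tokenization and model.generate call (including the max_new_tokens value) are assumptions, since those lines are not visible in this diff.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# New repository id introduced by this commit
model_id = "Kwaipilot/KwaiCoder-23B-A4B-v1"

tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_id, device_map="auto", torch_dtype=torch.bfloat16, trust_remote_code=True
)

text = "#write a quick sort algorithm"

# Assumed generation step; the README's exact generate() arguments
# are not shown in this diff.
inputs = tokenizer(text, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=256)

# Decode line visible in the hunk header (README line 43)
print(tokenizer.decode(outputs[0], skip_special_tokens=True)[len(text):])
```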
 
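The second hunk touches the fill-in-the-middle (FIM) example, whose prompt is cut off in the diff right after the <|fim▁begin|> tag. The sketch below shows how such a FIM prompt is typically assembled; the <|fim▁hole|> and <|fim▁end|> tags, the prefix/suffix split, and the function body are illustrative assumptions, not text taken from the README. It reuses the tokenizer and model loaded in the sketch above.

```python
# Assumed FIM prompt layout: only "<|fim▁begin|>def find_longest_substring(s):"
# appears in the diff; the hole/end tags and the suffix are illustrative guesses.
prefix = "def find_longest_substring(s):\n    "
suffix = "\n    return longest"
text = f"<|fim▁begin|>{prefix}<|fim▁hole|>{suffix}<|fim▁end|>"

inputs = tokenizer(text, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=128)

# Print only the newly generated middle section
print(tokenizer.decode(outputs[0], skip_special_tokens=True)[len(text):])
```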