Update app.py
Browse files
app.py
CHANGED
|
@@ -9,7 +9,7 @@ from ctransformers import AutoModelForCausalLM as CAutoModelForCausalLM
|
|
| 9 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 10 |
from interpret import InterpretationPrompt
|
| 11 |
|
| 12 |
- MAX_PROMPT_TOKENS =
|
| 13 |
|
| 14 |
## info
|
| 15 |
dataset_info = [{'name': 'Commonsense', 'hf_repo': 'tau/commonsense_qa', 'text_col': 'question'},
|
|
@@ -112,7 +112,7 @@ use_ctransformers = model_args.pop('ctransformers', False)
|
|
| 112 |
AutoModelClass = CAutoModelForCausalLM if use_ctransformers else AutoModelForCausalLM
|
| 113 |
|
| 114 |
# get model
|
| 115 |
- model = AutoModelClass.from_pretrained(model_path, **model_args)
|
| 116 |
tokenizer = AutoTokenizer.from_pretrained(tokenizer_path, token=os.environ['hf_token'])
|
| 117 |
|
| 118 |
# demo
|
|
|
|
| 9 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 10 |
from interpret import InterpretationPrompt
|
| 11 |
|
| 12 |
+ MAX_PROMPT_TOKENS = 60
|
| 13 |
|
| 14 |
## info
|
| 15 |
dataset_info = [{'name': 'Commonsense', 'hf_repo': 'tau/commonsense_qa', 'text_col': 'question'},
|
|
|
|
| 112 |
AutoModelClass = CAutoModelForCausalLM if use_ctransformers else AutoModelForCausalLM
|
| 113 |
|
| 114 |
# get model
|
| 115 |
+ model = AutoModelClass.from_pretrained(model_path, **model_args).cuda()
|
| 116 |
tokenizer = AutoTokenizer.from_pretrained(tokenizer_path, token=os.environ['hf_token'])
|
| 117 |
|
| 118 |
# demo
|