Commit bd620a9
Parent(s): e7c70b0

putting login back

model.py CHANGED

@@ -10,12 +10,12 @@ model_id = 'meta-llama/Llama-2-13b-chat-hf'
 
 if torch.cuda.is_available():
 
-
+    tok = os.environ['HF_TOKEN']
 
-
-
-
-
+    login(new_session=True,
+          write_permission=False,
+          token=tok
+          )
 
     config = AutoConfig.from_pretrained(model_id,
                                         use_auth_token=True)
@@ -30,8 +30,7 @@ if torch.cuda.is_available():
     )
 else:
     model = None
-tokenizer = AutoTokenizer.from_pretrained(model_id
-                                          use_auth_token=True)
+tokenizer = AutoTokenizer.from_pretrained(model_id)
 
 
 def get_prompt(message: str, chat_history: list[tuple[str, str]],
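
For reference, a minimal sketch of the authentication pattern this commit restores, shown in context: read a Hub token from the HF_TOKEN environment variable, call huggingface_hub.login() before fetching gated assets, and let the later tokenizer call rely on the stored token instead of an explicit use_auth_token argument. The imports and the surrounding layout here are assumptions for illustration; only the login block, the AutoConfig call, and the tokenizer line appear in the diff above.

# Sketch only: imports and layout are assumed, not taken verbatim from model.py.
import os

from huggingface_hub import login
from transformers import AutoConfig, AutoTokenizer

model_id = 'meta-llama/Llama-2-13b-chat-hf'

# HF_TOKEN is expected to be set as a secret/environment variable on the Space;
# os.environ[...] raises KeyError if it is missing.
tok = os.environ['HF_TOKEN']

# Open a fresh read-only session; login() stores the token locally, so later
# from_pretrained() calls can authenticate without passing it explicitly.
login(new_session=True,
      write_permission=False,
      token=tok)

# The config is still requested with an explicit auth flag, as in the first hunk...
config = AutoConfig.from_pretrained(model_id, use_auth_token=True)

# ...while the tokenizer call (second hunk) drops use_auth_token and relies on
# the token saved by login().
tokenizer = AutoTokenizer.from_pretrained(model_id)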