sitammeur committed on
Commit
bcdb617
·
verified ·
1 Parent(s): 35e89bb

Update src/app/model.py

Browse files
Files changed (1) hide show
  1. src/app/model.py +10 -8
src/app/model.py CHANGED
@@ -1,7 +1,7 @@
1
  # Necessary imports
2
- # import os
3
  import sys
4
- # from dotenv import load_dotenv
5
  from typing import Any
6
  import torch
7
  from transformers import AutoModel, AutoTokenizer
@@ -11,11 +11,11 @@ from src.logger import logging
11
  from src.exception import CustomExceptionHandling
12
 
13
 
14
- # # Load the Environment Variables from .env file
15
- # load_dotenv()
16
 
17
- # # Access token for using the model
18
- # access_token = os.environ.get("ACCESS_TOKEN")
19
 
20
 
21
  def load_model_and_tokenizer(model_name: str, device: str) -> Any:
@@ -37,10 +37,12 @@ def load_model_and_tokenizer(model_name: str, device: str) -> Any:
37
  trust_remote_code=True,
38
  attn_implementation="sdpa",
39
  torch_dtype=torch.bfloat16,
40
- # token=access_token
41
  )
42
  model = model.to(device=device)
43
- tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
 
 
44
  model.eval()
45
 
46
  # Log the successful loading of the model and tokenizer
 
1
  # Necessary imports
2
+ import os
3
  import sys
4
+ from dotenv import load_dotenv
5
  from typing import Any
6
  import torch
7
  from transformers import AutoModel, AutoTokenizer
 
11
  from src.exception import CustomExceptionHandling
12
 
13
 
14
+ # Load the Environment Variables from .env file
15
+ load_dotenv()
16
 
17
+ # Access token for using the model
18
+ access_token = os.environ.get("ACCESS_TOKEN")
19
 
20
 
21
  def load_model_and_tokenizer(model_name: str, device: str) -> Any:
 
37
  trust_remote_code=True,
38
  attn_implementation="sdpa",
39
  torch_dtype=torch.bfloat16,
40
+ token=access_token
41
  )
42
  model = model.to(device=device)
43
+ tokenizer = AutoTokenizer.from_pretrained(
44
+ model_name, trust_remote_code=True, token=access_token
45
+ )
46
  model.eval()
47
 
48
  # Log the successful loading of the model and tokenizer