Update app.py
app.py
CHANGED
@@ -24,12 +24,13 @@ device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 translator = None
 whisper_model = None
 
+HF_TOKEN = os.getenv("HF_TOKEN")
 def load_hausa_model():
     global translator
     if translator is None:
         model_name = "LocaleNLP/english_hausa"
-        model = AutoModelForSeq2SeqLM.from_pretrained(model_name).to(device)
-        tokenizer = MarianTokenizer.from_pretrained(model_name)
+        model = AutoModelForSeq2SeqLM.from_pretrained(model_name, token=HF_TOKEN).to(device)
+        tokenizer = MarianTokenizer.from_pretrained(model_name, token=HF_TOKEN)
         translator = pipeline("translation", model=model, tokenizer=tokenizer, device=0 if device.type == 'cuda' else -1)
     return translator
 
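For reference, a minimal self-contained sketch of how the loader reads after this change, assuming app.py imports os, torch, and the transformers symbols used in the hunk (AutoModelForSeq2SeqLM, MarianTokenizer, pipeline) and defines device as in the hunk header; the HF_TOKEN environment variable is expected to be provided externally (for example as a Space secret), and the example sentence at the bottom is illustrative only, not taken from the app.

import os
import torch
from transformers import AutoModelForSeq2SeqLM, MarianTokenizer, pipeline

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

translator = None

# Token read from the environment (e.g. a Space secret) so a private or
# gated checkpoint can be downloaded; it is None when the variable is unset.
HF_TOKEN = os.getenv("HF_TOKEN")

def load_hausa_model():
    # Lazily build the English-to-Hausa translation pipeline on first use
    # and cache it in the module-level variable.
    global translator
    if translator is None:
        model_name = "LocaleNLP/english_hausa"
        model = AutoModelForSeq2SeqLM.from_pretrained(model_name, token=HF_TOKEN).to(device)
        tokenizer = MarianTokenizer.from_pretrained(model_name, token=HF_TOKEN)
        translator = pipeline("translation", model=model, tokenizer=tokenizer,
                              device=0 if device.type == 'cuda' else -1)
    return translator

if __name__ == "__main__":
    # Illustrative call; the pipeline returns a list of dicts with 'translation_text'.
    print(load_hausa_model()("Good morning, how are you?"))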