OpeoluwaAdekoya committed
Commit 3b0d0e7
1 Parent(s): 3283d19
Update app.py
app.py CHANGED
@@ -10,7 +10,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, GPTQConfig
 import torch
 
 # Define the repository where your model is saved
-model_repo = "Dumele/
+model_repo = "Dumele/autotrain-shhsb-57a2l"  # Replace with your actual repository
 
 # Load the tokenizer from the repository
 tokenizer = AutoTokenizer.from_pretrained(model_repo)
@@ -49,7 +49,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
 import gradio as gr
 
 # Define the repository where your model is saved
-model_repo = "Dumele/
+model_repo = "Dumele/autotrain-shhsb-57a2l"  # Replace with your actual repository name
 
 # Load the tokenizer from the repository
 tokenizer = AutoTokenizer.from_pretrained(model_repo)
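For readers following along, below is a minimal sketch of how a repository constant like the one updated in this commit is typically wired into an app.py for a Space of this kind. The model loading, text-generation pipeline, and Gradio interface shown here are assumptions inferred from the imports visible in the hunks (torch, gradio, transformers); they are not the Space's actual code.

import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Repository updated in this commit
model_repo = "Dumele/autotrain-shhsb-57a2l"  # Replace with your actual repository

# Load the tokenizer and model from the Hugging Face Hub
tokenizer = AutoTokenizer.from_pretrained(model_repo)
model = AutoModelForCausalLM.from_pretrained(
    model_repo,
    device_map="auto",          # assumption: requires `accelerate`, not confirmed by the diff
    torch_dtype=torch.float16,  # assumption: half precision to fit modest GPU memory
)

# Wrap model and tokenizer in a text-generation pipeline
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)

def generate(prompt: str) -> str:
    # Return a short completion for the given prompt
    outputs = generator(prompt, max_new_tokens=128, do_sample=True)
    return outputs[0]["generated_text"]

# Expose the model through a simple Gradio text-to-text interface
demo = gr.Interface(fn=generate, inputs="text", outputs="text")

if __name__ == "__main__":
    demo.launch()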