# Textwizai/load_model.py
import os
from dotenv import load_dotenv
from transformers import TFBertForSequenceClassification, BertTokenizerFast
# Load environment variables from .env file
load_dotenv()
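# A hedged example of what the .env file might contain; the variable names come
# from the os.getenv() calls below, and the values are placeholders:
#   API_KEY=hf_xxxxxxxxxxxxxxxx       # Hugging Face access token
#   MODEL_PATH=Erfan11/Neuracraft     # model repo id read by main()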

def load_model(model_name):
    try:
        # Load the TensorFlow model from the Hugging Face Hub
        model = TFBertForSequenceClassification.from_pretrained(
            model_name, use_auth_token=os.getenv('API_KEY')
        )
    except OSError as exc:
        raise ValueError(f"Model loading failed for '{model_name}'.") from exc
    return model


def load_tokenizer(model_name):
    # Load the matching fast tokenizer from the same repo
    tokenizer = BertTokenizerFast.from_pretrained(
        model_name, use_auth_token=os.getenv('API_KEY')
    )
    return tokenizer


def predict(text, model, tokenizer):
    # Tokenize the input and run a forward pass; the output holds the raw classification logits
    inputs = tokenizer(text, return_tensors="tf")
    outputs = model(**inputs)
    return outputs

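
# Illustrative helper (an assumption, not shown in this file): one common way to
# turn the classifier's raw logits into per-class probabilities with a softmax.
def predict_probabilities(outputs):
    import tensorflow as tf  # local import: only this sketch needs TensorFlow directly
    return tf.nn.softmax(outputs.logits, axis=-1).numpy()
# Example: probs = predict_probabilities(predict("Sample input text", model, tokenizer))
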

def main():
    # Read the model repo id from the MODEL_PATH environment variable,
    # falling back to the published Hugging Face repo
    model_name = os.getenv('MODEL_PATH', 'Erfan11/Neuracraft')
    if not model_name:
        raise ValueError("MODEL_PATH environment variable not set or is None")
    model = load_model(model_name)
    tokenizer = load_tokenizer(model_name)
    # Example prediction
    text = "Sample input text"
    result = predict(text, model, tokenizer)
    print(result)


if __name__ == "__main__":
    main()