import os

from transformers import TFBertForSequenceClassification, BertTokenizerFast

# Hugging Face access token, read from the environment.
# NOTE(review): the original code hard-coded a live token literal AND passed it
# as the *name* argument to os.getenv(), which leaked the credential and always
# evaluated to None. Export HF_TOKEN before running; revoke the leaked token.
HF_TOKEN = os.getenv("HF_TOKEN")

# Default model repository; override with the MODEL_NAME environment variable.
DEFAULT_MODEL_NAME = "Erfan11/Neuracraft"


def load_model(model_name):
    """Load a TFBertForSequenceClassification model from the Hugging Face Hub.

    Tries native TensorFlow weights first; if the repo only ships a PyTorch
    checkpoint (from_pretrained raises OSError), retries with from_pt=True to
    convert the PyTorch weights on the fly.
    """
    try:
        # Prefer native TensorFlow weights.
        model = TFBertForSequenceClassification.from_pretrained(
            model_name, use_auth_token=HF_TOKEN
        )
    except OSError:
        # No TF weights in the repo — convert from the PyTorch checkpoint.
        model = TFBertForSequenceClassification.from_pretrained(
            model_name, use_auth_token=HF_TOKEN, from_pt=True
        )
    return model


def load_tokenizer(model_name):
    """Load the fast BERT tokenizer matching *model_name* from the Hub."""
    tokenizer = BertTokenizerFast.from_pretrained(
        model_name, use_auth_token=HF_TOKEN
    )
    return tokenizer


def predict(text, model, tokenizer):
    """Run a forward pass of *model* on *text* and return the raw outputs.

    The tokenizer produces TensorFlow tensors (return_tensors="tf"), so the
    returned object is the model's TF output (logits, etc.), not class labels.
    """
    inputs = tokenizer(text, return_tensors="tf")
    outputs = model(**inputs)
    return outputs


def main():
    """Load model + tokenizer and print the prediction for a sample input."""
    # NOTE(review): the original looked up os.getenv('Erfan11/Neuracraft'),
    # i.e. used the repo id as the env-var *key*, so it was always None and
    # main() always raised. Use MODEL_NAME with the repo id as the default.
    model_name = os.getenv("MODEL_NAME", DEFAULT_MODEL_NAME)
    model = load_model(model_name)
    tokenizer = load_tokenizer(model_name)
    # Example prediction
    text = "Sample input text"
    result = predict(text, model, tokenizer)
    print(result)


if __name__ == "__main__":
    main()