import warnings

import gradio as gr

from proxy_model import RemoteModelProxy

# Suppress the FutureWarning raised by torch
warnings.filterwarnings("ignore", category=FutureWarning, module="torch")

# Load the model via the proxy
model_proxy = RemoteModelProxy("deepseek-ai/DeepSeek-V3")


# Define the text classification function
def classify_text(text):
    try:
        # The proxy is expected to return one value per output component
        # declared below: (predicted_class, probabilities).
        result = model_proxy.classify_text(text)
        return result
    except Exception as e:
        print(f"Error during text classification: {e}")
        # Return one value per output component so Gradio can render the error state
        return "Error", {}


# Create a Gradio interface
try:
    iface = gr.Interface(
        fn=classify_text,  # Function to call
        inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),  # Input component
        outputs=[
            gr.Label(label="Predicted Class"),  # Output component for the predicted class
            gr.Label(label="Probabilities"),    # Output component for class probabilities
        ],
        title="DeepSeek-V3 Text Classification",
        description="Classify text using the DeepSeek-V3 model.",
    )
except Exception as e:
    print(f"Failed to create Gradio interface: {e}")
    raise  # without the interface, launching below would fail with a NameError

# Launch the interface
try:
    iface.launch()
except Exception as e:
    print(f"Failed to launch Gradio interface: {e}")
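
# For reference, a minimal sketch of what the proxy_model module could look like
# is included below as a comment. This is an assumption: the real proxy_model
# implementation (endpoint, auth, response format) is not shown here and may differ.
# Only the RemoteModelProxy constructor and classify_text signature used above are
# taken from this script; the endpoint URL and response fields are hypothetical.
#
# # proxy_model.py (hypothetical sketch)
# import requests
#
# class RemoteModelProxy:
#     def __init__(self, model_name):
#         self.model_name = model_name
#         # Assumed local inference endpoint; replace with the real server URL.
#         self.endpoint = "http://localhost:8000/classify"
#
#     def classify_text(self, text):
#         # Send the text to the remote server and return
#         # (predicted_class, {label: probability}) for the two Gradio outputs.
#         response = requests.post(
#             self.endpoint,
#             json={"model": self.model_name, "text": text},
#             timeout=30,
#         )
#         response.raise_for_status()
#         data = response.json()
#         return data["predicted_class"], data["probabilities"]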