# Gradio demo app: Galileo remote sensing model inference (Hugging Face Space).
# NOTE(review): lines above the imports were web-page scraping residue
# ("Spaces / Sleeping / File size / commit hash / line-number gutter") and
# have been replaced with this comment so the file parses as Python.
import gradio as gr
import torch
import os
def load_model(encoder_path, decoder_path):
    """Validate that both model weight files exist (placeholder loader).

    Args:
        encoder_path: Filesystem path to the encoder weights (e.g. encoder.pt).
        decoder_path: Filesystem path to the decoder weights (e.g. decoder.pt).

    Returns:
        A confirmation message string.

    Raises:
        FileNotFoundError: If either path does not exist on disk.
    """
    both_present = os.path.exists(encoder_path) and os.path.exists(decoder_path)
    if not both_present:
        raise FileNotFoundError("Model files not found. Please check the paths.")
    # No real weights are deserialized here -- this is a demonstration stub.
    return "Model loaded successfully!"
# Inference function (dummy function for demonstration)
def infer(input_data, encoder_path, decoder_path):
    """Run a (simulated) inference pass and return a status message.

    Args:
        input_data: Raw input for inference; echoed back in the result string.
        encoder_path: Path to the encoder weights file.
        decoder_path: Path to the decoder weights file.

    Returns:
        A human-readable result string on success, or the missing-file
        error message when the model files cannot be found.
    """
    try:
        # Validate/load the model files before "running" inference.
        load_model(encoder_path, decoder_path)
    except FileNotFoundError as e:
        # Surface the missing-file message in the UI instead of crashing.
        # Deliberately narrow: any other exception is a programming error
        # and should propagate rather than be shown as a result string.
        return str(e)
    # Simulate inference on the provided input.
    return f"Inference completed on input: {input_data}"
# Gradio interface: assemble the UI declaratively inside a Blocks context.
with gr.Blocks() as demo:
    gr.Markdown("# Galileo Remote Sensing Model Inference")
    gr.Markdown("This app allows you to perform inference using the Galileo pretrained models.")

    # Text inputs for the model file locations and the inference payload.
    encoder_path = gr.Textbox(label="Encoder Model Path", placeholder="Path to encoder.pt")
    decoder_path = gr.Textbox(label="Decoder Model Path", placeholder="Path to decoder.pt")
    input_data = gr.Textbox(label="Input Data", placeholder="Enter input data for inference")
    output = gr.Textbox(label="Output")

    # Wire the button click to the inference callback.
    submit_btn = gr.Button("Run Inference")
    submit_btn.click(infer, inputs=[input_data, encoder_path, decoder_path], outputs=output)

# Start the web server only when executed as a script (not on import).
if __name__ == "__main__":
    demo.launch()