# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "marimo-labs==0.1.0",
#     "marimo",
# ]
# ///
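# The block above is PEP 723 inline script metadata; tools such as `uv` (and
# marimo's `--sandbox` mode) read it to install the notebook's dependencies
# before running.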

import marimo

__generated_with = "0.11.5"
app = marimo.App(width="medium")


@app.cell
def _():
    import marimo as mo
    return (mo,)


@app.cell(hide_code=True)
def _(mo):
    default_model = mo.query_params().get("model") or ""
    model_repo = mo.ui.text(
        label="Model repo",
        full_width=True,
        value=default_model,
        placeholder="e.g. gpt2, bert-base-uncased",
    ).form(bordered=False)
    model_repo
    return default_model, model_repo
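# The model field can be pre-filled by opening the app with a `?model=<repo>`
# query parameter, which is read above via `mo.query_params()`.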


@app.cell(hide_code=True)
def _(mo):
    mo.md(
        r"""
        Interactively try out AI models hosted on HuggingFace!

        Some popular models to try:

        - **Feature Extraction**: `julien-c/distilbert-feature-extraction`
        - **Text to Speech**: `julien-c/ljspeech_tts_train_tacotron2_raw_phn_tacotron_g2p_en_no_space_train`
        - **Text to Image**: `runwayml/stable-diffusion-v1-5`
        """
    )
    return


@app.cell(hide_code=True)
def _(mo):
    mo.accordion(
        {
            "View more": mo.md("""
            _Audio Models_

            - **Audio Classification**: `ehcalabres/wav2vec2-lg-xlsr-en-speech-emotion-recognition`
            - **Audio to Audio**: `facebook/xm_transformer_sm_all-en`
            - **Speech Recognition**: `facebook/wav2vec2-base-960h`

            _Image Models_

            - **Image Classification**: `google/vit-base-patch16-224`
            - **Image to Text**: `Salesforce/blip-image-captioning-base`
            - **Object Detection**: `microsoft/table-transformer-detection`

            _Text Models_

            - **Fill Mask**: `distilbert/distilbert-base-uncased`
            - **Zero-shot Classification**: `facebook/bart-large-mnli`
            - **Sentence Similarity**: `sentence-transformers/all-MiniLM-L6-v2`
            """)
        }
    )
    return


@app.cell(hide_code=True)
def _(mo):
    token = mo.ui.text(
        label="HuggingFace token (optional)",
        kind="password",
    ).form(bordered=False)
    mo.accordion({"Enter your HuggingFace token (optional)": token})
    return (token,)
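# An access token is mainly needed for gated or private models (and may raise
# Inference API rate limits); many public models work without one.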


@app.cell(hide_code=True)
def _(mo, model_repo, molabs, token):
    # Halt this cell (and its dependents) until a model repo has been submitted.
    mo.stop(not model_repo.value, mo.md("Please enter a model name"))

    model_path = "models/" + model_repo.value.replace("models/", "")
    model = molabs.huggingface.load(
        model_path, hf_token=token.value if token.value else None
    )
    return model, model_path
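# As used in the cells below, the object returned by `molabs.huggingface.load`
# exposes `examples`, `inputs` (a marimo UI form), and `inference_function`.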


@app.cell(hide_code=True)
def _(mo, model):
    mo.stop(not model)

    mo.md(
        f"""
        ### Example inputs

        Try these sample inputs with the model:

        {mo.as_html(model.examples)}
        """
    )
    return


@app.cell(hide_code=True)
def _(mo, model):
    inputs = model.inputs

    mo.vstack(
        [
            mo.md("### Model Inputs"),
            mo.md("_Submit inputs below to run inference_ 👇"),
            inputs,
        ],
    )
    return (inputs,)


@app.cell(hide_code=True)
def _(inputs, mo, model):
    # Wait until the form has been submitted before running inference.
    mo.stop(inputs.value is None, mo.md("Please provide input values"))

    with mo.status.spinner("Running inference..."):
        output = model.inference_function(inputs.value)
    mo.output.replace(mo.vstack([mo.md("### Results"), output]))
    return (output,)
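# `mo.status.spinner` shows a progress indicator while the block runs;
# `mo.output.replace` then swaps the cell's output for the rendered results.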


@app.cell
def _():
    import sys

    import marimo_labs as molabs
    return molabs, sys
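# marimo runs cells in dependency-graph order, not file order, so `molabs`
# defined here is available to the model-loading cell above.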


if __name__ == "__main__":
    app.run()