import gradio as gr
import json
import requests
import pandas as pd
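# Gradio app for managing Hugging Face Inference Endpoints via the
# api.endpoints.huggingface.cloud API: list deployed endpoints, deploy new
# ones, and update or delete existing ones.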
def update_task_options(framework):
    # Tasks offered for each supported framework in the task dropdown.
    config = {
        "Custom": ["Custom"],
        "Diffusers": ["Text To Image"],
        "Transformers": [
            "Text Classification",
            "Zero Shot Classification",
            "Token Classification",
            "Question Answering",
            "Fill Mask",
            "Summarization",
            "Translation",
            "Text to Text Generation",
            "Text Generation",
            "Feature Extraction",
            "Image Classification",
            "Automatic Speech Recognition",
            "Audio Classification",
            "Object Detection",
            "Image Segmentation",
            "Table Question Answering",
            "Conversational",
            "Visual Question Answering",
            "Zero Shot Image Classification"]
    }

    return gr.Dropdown.update(
        choices=config[framework],
        value=config[framework][0] if len(config[framework]) > 0 else None
    )
def update_regions(provider):
    available_regions = []
    headers = {
        "Content-Type": "application/json",
    }
    endpoint_url = f"https://api.endpoints.huggingface.cloud/provider/{provider}/region"
    response = requests.get(endpoint_url, headers=headers)

    for region in response.json()['items']:
        if region['status'] == 'available':
            available_regions.append(f"{region['region']}/{region['label']}")

    return gr.Dropdown.update(
        choices=available_regions,
        value=available_regions[0] if len(available_regions) > 0 else None
    )
def update_compute_options(provider, region):
    region = region.split("/")[0]
    available_compute_choices = []
    headers = {
        "Content-Type": "application/json",
    }
    endpoint_url = f"https://api.endpoints.huggingface.cloud/provider/{provider}/region/{region}/compute"
    print(endpoint_url)
    response = requests.get(endpoint_url, headers=headers)

    for compute in response.json()['items']:
        if compute['status'] == 'available':
            accelerator = compute['accelerator']
            numAccelerators = compute['numAccelerators']
            memoryGb = compute['memoryGb'].replace("Gi", "GB")
            architecture = compute['architecture']
            instanceType = compute['instanceType']

            # Human-readable summary: vCPU count and memory for CPU instances,
            # accelerator count and architecture otherwise.
            description = f"{numAccelerators}vCPU {memoryGb} · {architecture}" if accelerator == "cpu" else f"{numAccelerators}x {architecture}"

            available_compute_choices.append(
                f"{accelerator.upper()} [{compute['instanceSize']}] · {description} · {instanceType}"
            )

    return gr.Dropdown.update(
        choices=available_compute_choices,
        value=available_compute_choices[0] if len(available_compute_choices) > 0 else None
    )
def submit(
    hf_token_input,
    endpoint_name_input,
    provider_selector,
    region_selector,
    repository_selector,
    revision_selector,
    task_selector,
    framework_selector,
    compute_selector,
    min_node_selector,
    max_node_selector,
    security_selector
):
    # Recover the accelerator, instance size, and instance type from the
    # "·"-separated compute description built in update_compute_options.
    compute_resources = compute_selector.split("·")
    accelerator = compute_resources[0][:3].strip()

    size_l_index = compute_resources[0].index("[") + 1
    size_r_index = compute_resources[0].index("]")
    size = compute_resources[0][size_l_index:size_r_index].strip()

    instance_type = compute_resources[-1].strip()
    payload = {
        "accountId": repository_selector.split("/")[0],
        "compute": {
            "accelerator": accelerator.lower(),
            "instanceSize": size,
            "instanceType": instance_type,
            "scaling": {
                "maxReplica": int(max_node_selector),
                "minReplica": int(min_node_selector)
            }
        },
        "model": {
            "framework": "custom",
            "image": {
                "huggingface": {}
            },
            "repository": repository_selector.lower(),
            "revision": revision_selector,
            "task": task_selector.lower()
        },
        "name": endpoint_name_input.strip(),
        "provider": {
            "region": region_selector.split("/")[0].lower(),
            "vendor": provider_selector.lower()
        },
        "type": security_selector.lower()
    }

    payload = json.dumps(payload)
    print(f"Payload: {payload}")

    headers = {
        "Authorization": f"Bearer {hf_token_input.strip()}",
        "Content-Type": "application/json",
    }
    endpoint_url = "https://api.endpoints.huggingface.cloud/endpoint"
    print(f"Endpoint: {endpoint_url}")

    response = requests.post(endpoint_url, headers=headers, data=payload)

    if response.status_code == 400:
        return f"{response.text}. Malformed data in {payload}"
    elif response.status_code == 401:
        return "Invalid token"
    elif response.status_code == 409:
        return f"Error: {response.text}"
    elif response.status_code == 202:
        return f"Endpoint {endpoint_name_input} created successfully on {provider_selector.lower()} using {repository_selector.lower()}@{revision_selector}.\nPlease check the progress at https://ui.endpoints.huggingface.co/endpoints."
    else:
        return f"Something went wrong: {response.status_code} = {response.text}"
def delete_endpoint(
    hf_token_input,
    endpoint_name_input
):
    response = requests.delete(
        f"https://api.endpoints.huggingface.cloud/endpoint/{endpoint_name_input}",
        headers={
            "Authorization": f"Bearer {hf_token_input.strip()}",
            "Content-Type": "application/json",
        }
    )

    if response.status_code == 401:
        return "Invalid token"
    elif response.status_code == 404:
        return f"Error: {response.text}"
    elif response.status_code == 202:
        return f"Endpoint {endpoint_name_input} deleted successfully."
    else:
        return f"Something went wrong: {response.status_code} = {response.text}"
def get_all_endpoints(
    hf_token_input,
):
    response = requests.get(
        "https://api.endpoints.huggingface.cloud/endpoint",
        headers={
            "Authorization": f"Bearer {hf_token_input.strip()}",
            "Content-Type": "application/json",
        })

    if response.status_code == 401:
        return "Invalid token"
    elif response.status_code == 200:
        endpoints_json = response.json()
        print(endpoints_json)

        endpoints_df = pd.DataFrame(endpoints_json["items"])
        endpoints_df = endpoints_df[["name", "model", "provider", "compute", "status"]]

        # Pull the scalar fields out of the nested dicts first, then flatten
        # the dict columns into display strings.
        endpoints_df["minReplica"] = endpoints_df["compute"].apply(lambda x: x["scaling"]["minReplica"])
        endpoints_df["maxReplica"] = endpoints_df["compute"].apply(lambda x: x["scaling"]["maxReplica"])
        endpoints_df["createdAt"] = endpoints_df["status"].apply(lambda x: x["createdAt"])
        endpoints_df["updatedAt"] = endpoints_df["status"].apply(lambda x: x["updatedAt"])

        endpoints_df["model"] = endpoints_df["model"].apply(lambda x: x["repository"] + "@" + x["revision"])
        endpoints_df["provider"] = endpoints_df["provider"].apply(lambda x: x["vendor"] + "/" + x["region"])
        endpoints_df["compute"] = endpoints_df["compute"].apply(lambda x: x["instanceType"] + "·" + x["instanceSize"] + " [" + x["accelerator"] + "]")
        endpoints_df["status"] = endpoints_df["status"].apply(lambda x: x["state"])

        endpoints_df = endpoints_df[["name", "model", "provider", "compute", "status", "minReplica", "maxReplica", "createdAt", "updatedAt"]]

        return gr.Dataframe.update(
            value=endpoints_df
        )
def update_endpoint(
    hf_token_input,
    endpoint_name_input,
    min_node_selector,
    max_node_selector,
    instance_type,
):
    # Extract the instance size (the text inside "[...]") and the instance type
    # (the last "·"-separated field) from the compute description string.
    payload = {
        "compute": {
            "instanceSize": instance_type.split("·")[0].split("[")[1].split("]")[0],
            "instanceType": instance_type.split("·")[-1].strip(),
            "scaling": {
                "maxReplica": int(max_node_selector),
                "minReplica": int(min_node_selector)
            }
        }
    }

    response = requests.put(
        f"https://api.endpoints.huggingface.cloud/endpoint/{endpoint_name_input}",
        headers={
            "Authorization": f"Bearer {hf_token_input.strip()}",
            "Content-Type": "application/json",
        },
        data=json.dumps(payload),
    )

    if response.status_code == 401:
        return "Invalid token"
    elif response.status_code == 404:
        return f"Error: {response.text}"
    elif response.status_code == 202:
        return f"Endpoint {endpoint_name_input} updated successfully."
    else:
        return f"Something went wrong: {response.status_code} = {response.text}"
with gr.Blocks() as interface:
    gr.Markdown("""
    #### Your 🤗 Access Token (Required)
    """)
    hf_token_input = gr.Textbox(
        show_label=False,
        type="password"
    )

    # Get All Endpoints Info
    with gr.Tab("Info"):
        gr.Markdown("""
        ### All Deployed Endpoints
        """)
        endpoints_table = gr.Dataframe(
            headers=["Endpoint Name", "Model", "Provider", "Instance Type", "Status", "Min Replica", "Max Replica", "Created At", "Updated At"],
            col_count=(9, "fixed"),
        )
        endpoint_info_button = gr.Button(value="Get Info")

    # Deploy Endpoint
    with gr.Tab("Deploy Endpoint"):
        gr.Markdown(
            """
            ###