akhaliq committed ff9285c · 1 parent: d09a226

add minimax

Files changed (4):
  1. app.py +3 -1
  2. app_minimax.py +22 -0
  3. pyproject.toml +1 -1
  4. requirements.txt +2 -1
app.py CHANGED
@@ -34,10 +34,12 @@ from app_groq_coder import demo as demo_groq_coder
 from app_openai_coder import demo as demo_openai_coder
 from app_langchain import demo as demo_langchain
 from app_mistral import demo as demo_mistral
+from app_minimax import demo as demo_minimax
 from utils import get_app

 # Create mapping of providers to their demos
 PROVIDERS = {
+    "Minimax": demo_minimax,
     "Gemini Camera": demo_gemini_camera,
     "Mistral": demo_mistral,
     "Langchain Agent": demo_langchain,
@@ -78,7 +80,7 @@ PROVIDERS = {

 demo = get_app(
     models=list(PROVIDERS.keys()),
-    default_model="Gemini Camera",
+    default_model="Minimax",
     src=PROVIDERS,
     dropdown_label="Select Provider",
 )
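For orientation: utils.get_app is not part of this commit, so the sketch below is only a guess at the kind of dropdown-driven wrapper it could be, based solely on the call signature visible in the hunk above (models, default_model, src, dropdown_label). The visibility-toggling approach and the helper body are assumptions for illustration, not the Space's actual implementation.

import gradio as gr

def get_app(models, default_model, src, dropdown_label="Select Provider"):
    # Illustrative sketch only: render every provider demo in its own column
    # and show whichever one matches the dropdown selection.
    with gr.Blocks() as app:
        dropdown = gr.Dropdown(choices=models, value=default_model, label=dropdown_label)
        columns = []
        for name in models:
            with gr.Column(visible=(name == default_model)) as col:
                src[name].render()  # each provider module exposes a Gradio demo
            columns.append(col)

        def switch(choice):
            return [gr.update(visible=(name == choice)) for name in models]

        dropdown.change(switch, inputs=dropdown, outputs=columns)
    return app

Under this reading, adding Minimax is just the new import, one PROVIDERS entry, and the changed default_model value shown in the diff.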
app_minimax.py ADDED
@@ -0,0 +1,22 @@
+import ai_gradio
+
+from utils_ai_gradio import get_app
+
+# Get the Minimax models but keep their full names for loading
+MINIMAX_MODELS_FULL = [k for k in ai_gradio.registry.keys() if k.startswith("minimax:")]
+
+# Create display names without the prefix
+MINIMAX_MODELS_DISPLAY = [k.replace("minimax:", "") for k in MINIMAX_MODELS_FULL]
+
+
+# Create and launch the interface using get_app utility
+demo = get_app(
+    models=MINIMAX_MODELS_FULL,  # Use the full names with prefix
+    default_model=MINIMAX_MODELS_FULL[0],
+    dropdown_label="Select Minimax Model",
+    choices=MINIMAX_MODELS_DISPLAY,  # Display names without prefix
+    fill_height=True,
+)
+
+if __name__ == "__main__":
+    demo.launch()
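app_minimax.py hard-codes no model names: it takes whatever minimax:-prefixed keys ai_gradio.registry exposes and derives the display names by stripping the prefix, so MINIMAX_MODELS_FULL and MINIMAX_MODELS_DISPLAY stay index-aligned by construction. A small standalone check of that invariant, assuming ai-gradio is installed with the minimax extra (the exact keys depend on the installed release):

import ai_gradio

# Full keys keep the "minimax:" prefix needed for loading; display names drop it.
full = [k for k in ai_gradio.registry.keys() if k.startswith("minimax:")]
display = [k.replace("minimax:", "") for k in full]

# Both lists are derived from the same source list, so positions line up.
for full_key, shown in zip(full, display):
    assert full_key == f"minimax:{shown}"

print(f"{len(full)} Minimax model(s) registered:", display)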
pyproject.toml CHANGED
@@ -38,7 +38,7 @@ dependencies = [
     "langchain>=0.3.14",
     "chromadb>=0.5.23",
     "openai>=1.55.0",
-    "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,transformers, langchain, mistral]>=0.2.15",
+    "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,transformers, langchain, mistral,minimax]>=0.2.16",
 ]

 [tool.uv.sources]
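Presumably the new minimax extra (together with the bump to >=0.2.16) is what makes the minimax: keys appear in ai_gradio.registry for app_minimax.py above; the pinned versions in requirements.txt below then follow from re-running the command recorded in its header, uv pip compile pyproject.toml -o requirements.txt.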
requirements.txt CHANGED
@@ -2,7 +2,7 @@
 # uv pip compile pyproject.toml -o requirements.txt
 accelerate==1.2.1
     # via ai-gradio
-ai-gradio==0.2.15
+ai-gradio==0.2.16
     # via anychat (pyproject.toml)
 aiofiles==23.2.1
     # via gradio
@@ -990,6 +990,7 @@ replicate-gradio @ git+https://github.com/AK391/replicate-gradio.git@691c397515f
     # via anychat (pyproject.toml)
 requests==2.32.3
     # via
+    #   ai-gradio
     #   auth0-python
     #   cohere
     #   crewai-tools