Spaces:
Running
on
Zero
Running
on
Zero
Upload 2 files
Browse files
- llmenv.py +37 -0
- requirements.txt +1 -1
llmenv.py
CHANGED
@@ -151,6 +151,7 @@ llm_models = {
|
|
151 |
"BianCang-Qwen2.5-14B-Instruct.Q4_K_M.gguf": ["mradermacher/BianCang-Qwen2.5-14B-Instruct-GGUF", MessagesFormatterType.OPEN_CHAT],
|
152 |
"Simulation_LLM_wiki_14B_V2.Q4_K_M.gguf": ["mradermacher/Simulation_LLM_wiki_14B_V2-GGUF", MessagesFormatterType.OPEN_CHAT],
|
153 |
"Neona-12B.i1-Q4_K_M.gguf": ["mradermacher/Neona-12B-i1-GGUF", MessagesFormatterType.MISTRAL],
|
|
|
154 |
"Irixxed_Homunculus-12B-Q3T-v.0.3-Reasoner.Q4_K_M.gguf": ["mradermacher/Irixxed_Homunculus-12B-Q3T-v.0.3-Reasoner-GGUF", MessagesFormatterType.MISTRAL],
|
155 |
"Gemma-2-Llama-Swallow-9b-pt-v0.1.Q4_K_M.gguf": ["mradermacher/Gemma-2-Llama-Swallow-9b-pt-v0.1-GGUF", MessagesFormatterType.ALPACA],
|
156 |
"Qwen2.5-7B-base-french-bespoke-stratos-full-sft.i1-Q5_K_S.gguf": ["mradermacher/Qwen2.5-7B-base-french-bespoke-stratos-full-sft-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
@@ -168,6 +169,42 @@ llm_models = {
|
|
168 |
#"": ["", MessagesFormatterType.OPEN_CHAT],
|
169 |
#"": ["", MessagesFormatterType.CHATML],
|
170 |
#"": ["", MessagesFormatterType.PHI_3],
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
171 |
"Metis-RISE-7B.i1-Q5_K_M.gguf": ["mradermacher/Metis-RISE-7B-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
172 |
"OpenBuddy-R10528DistillQwen-14B-v27.1.i1-Q4_K_M.gguf": ["mradermacher/OpenBuddy-R10528DistillQwen-14B-v27.1-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
173 |
"sft_subtitles_fixer_v1_merged.Q4_K_M.gguf": ["mradermacher/sft_subtitles_fixer_v1_merged-GGUF", MessagesFormatterType.ALPACA],
|
|
|
151 |
"BianCang-Qwen2.5-14B-Instruct.Q4_K_M.gguf": ["mradermacher/BianCang-Qwen2.5-14B-Instruct-GGUF", MessagesFormatterType.OPEN_CHAT],
|
152 |
"Simulation_LLM_wiki_14B_V2.Q4_K_M.gguf": ["mradermacher/Simulation_LLM_wiki_14B_V2-GGUF", MessagesFormatterType.OPEN_CHAT],
|
153 |
"Neona-12B.i1-Q4_K_M.gguf": ["mradermacher/Neona-12B-i1-GGUF", MessagesFormatterType.MISTRAL],
|
154 |
+
"Pinecone-Rune-12b.Q4_K_M.gguf": ["mradermacher/Pinecone-Rune-12b-GGUF", MessagesFormatterType.MISTRAL],
|
155 |
"Irixxed_Homunculus-12B-Q3T-v.0.3-Reasoner.Q4_K_M.gguf": ["mradermacher/Irixxed_Homunculus-12B-Q3T-v.0.3-Reasoner-GGUF", MessagesFormatterType.MISTRAL],
|
156 |
"Gemma-2-Llama-Swallow-9b-pt-v0.1.Q4_K_M.gguf": ["mradermacher/Gemma-2-Llama-Swallow-9b-pt-v0.1-GGUF", MessagesFormatterType.ALPACA],
|
157 |
"Qwen2.5-7B-base-french-bespoke-stratos-full-sft.i1-Q5_K_S.gguf": ["mradermacher/Qwen2.5-7B-base-french-bespoke-stratos-full-sft-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
|
|
169 |
#"": ["", MessagesFormatterType.OPEN_CHAT],
|
170 |
#"": ["", MessagesFormatterType.CHATML],
|
171 |
#"": ["", MessagesFormatterType.PHI_3],
|
172 |
+
"VersaVid-R1.Q5_K_M.gguf": ["mradermacher/VersaVid-R1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
173 |
+
"sphinxnautics-7b-kl-distilled.Q5_K_M.gguf": ["mradermacher/sphinxnautics-7b-kl-distilled-GGUF", MessagesFormatterType.OPEN_CHAT],
|
174 |
+
"gemma3-12b-tolkien.Q5_K_M.gguf": ["mradermacher/gemma3-12b-tolkien-GGUF", MessagesFormatterType.ALPACA],
|
175 |
+
"ThinkLite-Critic-7b.Q5_K_M.gguf": ["mradermacher/ThinkLite-Critic-7b-GGUF", MessagesFormatterType.OPEN_CHAT],
|
176 |
+
"Qwen2.5-nsfw.Q5_K_M.gguf": ["mradermacher/Qwen2.5-nsfw-GGUF", MessagesFormatterType.OPEN_CHAT],
|
177 |
+
"Rewiz-Tom-7B.Q5_K_M.gguf": ["mradermacher/Rewiz-Tom-7B-GGUF", MessagesFormatterType.OPEN_CHAT],
|
178 |
+
"NextCoder-Mirage-sol-7B.i1-Q5_K_M.gguf": ["mradermacher/NextCoder-Mirage-sol-7B-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
179 |
+
"qwen2.5-7b-v1.Q5_K_M.gguf": ["mradermacher/qwen2.5-7b-v1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
180 |
+
"CriticLeanGPT-Qwen2.5-7B-Instruct-SFT.i1-Q5_K_M.gguf": ["mradermacher/CriticLeanGPT-Qwen2.5-7B-Instruct-SFT-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
181 |
+
"OpenR1-Qwen-7B-Instruct-SFT-lora-ctx16k.Q5_K_M.gguf": ["mradermacher/OpenR1-Qwen-7B-Instruct-SFT-lora-ctx16k-GGUF", MessagesFormatterType.OPEN_CHAT],
|
182 |
+
"CriticLeanGPT-Qwen2.5-14B-Instruct-SFT-RL.Q4_K_M.gguf": ["mradermacher/CriticLeanGPT-Qwen2.5-14B-Instruct-SFT-RL-GGUF", MessagesFormatterType.OPEN_CHAT],
|
183 |
+
"Qwen3-14B-SFT.Q5_K_M.gguf": ["mradermacher/Qwen3-14B-SFT-GGUF", MessagesFormatterType.OPEN_CHAT],
|
184 |
+
"Blossom-V6.1-8B.Q5_K_M.gguf": ["mradermacher/Blossom-V6.1-8B-GGUF", MessagesFormatterType.OPEN_CHAT],
|
185 |
+
"Homunculus-abliterated.Q4_K_M.gguf": ["mradermacher/Homunculus-abliterated-GGUF", MessagesFormatterType.CHATML],
|
186 |
+
"GThinker-7B.i1-Q5_K_M.gguf": ["mradermacher/GThinker-7B-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
187 |
+
"TAMA-QWen2.5.i1-Q5_K_M.gguf": ["mradermacher/TAMA-QWen2.5-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
188 |
+
"Qwen3-psychological-reasoning-1.7B.Q5_K_M.gguf": ["mradermacher/Qwen3-psychological-reasoning-1.7B-GGUF", MessagesFormatterType.OPEN_CHAT],
|
189 |
+
"QuasiStarSynth-12B.Q4_K_M.gguf": ["mradermacher/QuasiStarSynth-12B-GGUF", MessagesFormatterType.MISTRAL],
|
190 |
+
"Solution-back-7B.Q5_K_M.gguf": ["mradermacher/Solution-back-7B-GGUF", MessagesFormatterType.OPEN_CHAT],
|
191 |
+
"Qwen2.5-3B-UFO-hotpotqa-1turn.Q5_K_M.gguf": ["mradermacher/Qwen2.5-3B-UFO-hotpotqa-1turn-GGUF", MessagesFormatterType.OPEN_CHAT],
|
192 |
+
"AbyssSynth-12B.Q4_K_M.gguf": ["mradermacher/AbyssSynth-12B-GGUF", MessagesFormatterType.MISTRAL],
|
193 |
+
"E-Star-12B-v0.1.Q4_K_M.gguf": ["mradermacher/E-Star-12B-v0.1-GGUF", MessagesFormatterType.ALPACA],
|
194 |
+
"SingularitySynth-12B.i1-Q4_K_M.gguf": ["mradermacher/SingularitySynth-12B-i1-GGUF", MessagesFormatterType.MISTRAL],
|
195 |
+
"MT2-Gen2_gemma-3-12B.Q4_K_M.gguf": ["mradermacher/MT2-Gen2_gemma-3-12B-GGUF", MessagesFormatterType.ALPACA],
|
196 |
+
"LilithCore-v1-12B.Q4_K_M.gguf": ["mradermacher/LilithCore-v1-12B-GGUF", MessagesFormatterType.MISTRAL],
|
197 |
+
"WeThink-Qwen2.5VL-7B.i1-Q5_K_M.gguf": ["mradermacher/WeThink-Qwen2.5VL-7B-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
198 |
+
"RuleReasoner-8B.Q5_K_M.gguf": ["mradermacher/RuleReasoner-8B-GGUF", MessagesFormatterType.OPEN_CHAT],
|
199 |
+
"Vision-Matters-7B.Q5_K_M.gguf": ["mradermacher/Vision-Matters-7B-GGUF", MessagesFormatterType.OPEN_CHAT],
|
200 |
+
"gemma-3-cn-novel-4b.Q5_K_M.gguf": ["mradermacher/gemma-3-cn-novel-4b-GGUF", MessagesFormatterType.ALPACA],
|
201 |
+
"Qwen2.5-7B-Instruct-ultrafeedback-10k.Q5_K_M.gguf": ["mradermacher/Qwen2.5-7B-Instruct-ultrafeedback-10k-GGUF", MessagesFormatterType.OPEN_CHAT],
|
202 |
+
"RACRO-7B-CRO.i1-Q5_K_M.gguf": ["mradermacher/RACRO-7B-CRO-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
203 |
+
"QVikhr-3-4B-Instruction.Q6_K.gguf": ["mradermacher/QVikhr-3-4B-Instruction-GGUF", MessagesFormatterType.OPEN_CHAT],
|
204 |
+
"care-japanese-gemma2-9b.Q4_K_M.gguf": ["mradermacher/care-japanese-gemma2-9b-GGUF", MessagesFormatterType.ALPACA],
|
205 |
+
"Geo-SFT.i1-Q5_K_M.gguf": ["mradermacher/Geo-SFT-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
206 |
+
"Geo-R1-CI-0620.Q5_K_M.gguf": ["mradermacher/Geo-R1-CI-0620-GGUF", MessagesFormatterType.OPEN_CHAT],
|
207 |
+
"SpaceQwen2.5-VL-3B-Instruct.Q5_K_M.gguf": ["mradermacher/SpaceQwen2.5-VL-3B-Instruct-GGUF", MessagesFormatterType.OPEN_CHAT],
|
208 |
"Metis-RISE-7B.i1-Q5_K_M.gguf": ["mradermacher/Metis-RISE-7B-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
209 |
"OpenBuddy-R10528DistillQwen-14B-v27.1.i1-Q4_K_M.gguf": ["mradermacher/OpenBuddy-R10528DistillQwen-14B-v27.1-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
210 |
"sft_subtitles_fixer_v1_merged.Q4_K_M.gguf": ["mradermacher/sft_subtitles_fixer_v1_merged-GGUF", MessagesFormatterType.ALPACA],
|
requirements.txt
CHANGED
@@ -5,7 +5,7 @@ hf_transfer
|
|
5 |
scikit-build-core
|
6 |
#https://github.com/abetlen/llama-cpp-python/releases/download/v0.3.4-cu124/llama_cpp_python-0.3.4-cp310-cp310-linux_x86_64.whl
|
7 |
#git+https://github.com/Maximilian-Winter/llama-cpp-agent
|
8 |
-
https://github.com/John6666cat/llama-cpp-python/releases/download/v0.3.
|
9 |
git+https://github.com/John6666cat/llama-cpp-agent
|
10 |
pybind11>=2.12
|
11 |
torch==2.4.0
|
|
|
5 |
scikit-build-core
|
6 |
#https://github.com/abetlen/llama-cpp-python/releases/download/v0.3.4-cu124/llama_cpp_python-0.3.4-cp310-cp310-linux_x86_64.whl
|
7 |
#git+https://github.com/Maximilian-Winter/llama-cpp-agent
|
8 |
+
https://github.com/John6666cat/llama-cpp-python/releases/download/v0.3.14-cu124-AVX-linux-20250731/llama_cpp_python-0.3.14-cp310-cp310-linux_x86_64.whl
|
9 |
git+https://github.com/John6666cat/llama-cpp-agent
|
10 |
pybind11>=2.12
|
11 |
torch==2.4.0
|