Synced repo using 'sync_with_huggingface' Github Action
- client/html/index.html +2 -1
- g4f/Provider/Providers/Naga.py +2 -1
- g4f/models.py +7 -1
client/html/index.html
CHANGED
@@ -116,9 +116,10 @@
             <option value="gpt-4-0314">GPT-4-0314</option>
             <option value="gpt-4-32k">GPT-4-32k</option>
         </optgroup>
-        <optgroup label="
+        <optgroup label="OPEN LLM">
             <option value="oasst-sft-6-llama-30b">LLaMa-30B-sft-6</option>
             <option value="llama-2-70b-chat">LLaMa-2-70B-chat</option>
+            <option value="falcon-180b-chat">Falcon-180b-chat</option>
         </optgroup>
         <optgroup label="CLAUDE">
             <option value="claude-instant">Claude-instant</option>
g4f/Provider/Providers/Naga.py
CHANGED
@@ -17,8 +17,9 @@ model = [
     'gpt-4',
     'gpt-4-0314',
     'gpt-4-32k',
-    'llama-2-70b-chat',
     'oasst-sft-6-llama-30b',
+    'llama-2-70b-chat',
+    'falcon-180b-chat',
     'claude-instant',
     'claude-2',
     'claude-2-100k'
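The `model` list in this provider is what the backend accepts for Naga-routed requests, so adding 'falcon-180b-chat' here is what makes the new dropdown value usable. A minimal sketch of that membership check in Python, assuming only the module path and the module-level `model` list shown in the hunk above (the helper function itself is hypothetical, not code from this repo):

# Hypothetical guard; assumes g4f/Provider/Providers/Naga.py defines a
# module-level list named `model`, as the hunk header above indicates.
from g4f.Provider.Providers import Naga

def is_supported_by_naga(model_name: str) -> bool:
    """Return True if the Naga provider advertises `model_name`."""
    return model_name in Naga.model

# After this commit, both of the entries touched above are advertised:
assert is_supported_by_naga('llama-2-70b-chat')
assert is_supported_by_naga('falcon-180b-chat')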
g4f/models.py
CHANGED
@@ -97,6 +97,11 @@ class Model:
         base_provider: str = 'huggingface-chat'
         best_provider: Provider.Provider = Provider.Naga
 
+    class falcon_180b_chat:
+        name: str = 'falcon-180b-chat'
+        base_provider: str = 'huggingface-chat'
+        best_provider: Provider.Provider = Provider.Naga
+
 
 
 class ModelUtils:
@@ -117,8 +122,9 @@ class ModelUtils:
         'claude-2': Model.claude_2,
         'claude-2-100k': Model.claude_2_100k,
 
+        'oasst-sft-6-llama-30b': Model.oasst_sft_6_llama_30b,
         'llama-2-7b-chat': Model.llama_2_7b_chat,
         'llama-2-13b-chat': Model.llama_2_13b_chat,
         'llama-2-70b-chat': Model.llama_2_70b_chat,
-        '
+        'falcon-180b-chat': Model.falcon_180b_chat
     }
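For reference, a short sketch of how the new entry is reachable from Python. The attribute access on Model follows directly from the class added above; the name `ModelUtils.convert` for the dictionary is an assumption carried over from upstream gpt4free, since the hunk shows the dictionary's entries but not its identifier:

from g4f import models

# Grounded in the diff: the nested class added to Model in this commit.
falcon = models.Model.falcon_180b_chat
print(falcon.name)           # 'falcon-180b-chat'
print(falcon.base_provider)  # 'huggingface-chat'

# Assumption: the dictionary inside ModelUtils is named `convert`, as in
# upstream gpt4free. It maps the <option value="..."> string sent by
# client/html/index.html to the corresponding Model class.
assert models.ModelUtils.convert['falcon-180b-chat'] is falcon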