Synced repo using 'sync_with_huggingface' Github Action
Files changed:
- g4f/Provider/Providers/Wewordle.py (+3 -1)
- g4f/models.py (+1 -1)
g4f/Provider/Providers/Wewordle.py
CHANGED

@@ -62,7 +62,9 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
     if response.status_code == 200:
         _json = response.json()
         if 'message' in _json:
-
+            message_content = _json['message']['content']
+            message_content = message_content.replace('**assistant:** ', '')
+            yield message_content
     else:
         print(f"Error Occurred::{response.status_code}")
         return None
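For context, the sketch below illustrates what the new logic does with a successful response: it pulls message.content out of the JSON body, strips the '**assistant:** ' prefix, and yields the cleaned text. This is a standalone illustration rather than the provider file itself; the handle_response helper is hypothetical and stands in for the request/response plumbing the diff does not show.

import json


def handle_response(status_code: int, body: str):
    # Hypothetical stand-in for the provider's response handling after this change.
    if status_code == 200:
        _json = json.loads(body)
        if 'message' in _json:
            message_content = _json['message']['content']
            # Drop the "**assistant:** " prefix the backend prepends to replies.
            message_content = message_content.replace('**assistant:** ', '')
            yield message_content
    else:
        print(f"Error Occurred::{status_code}")
        return None


# Usage: the generator now yields the cleaned reply text.
for chunk in handle_response(200, '{"message": {"content": "**assistant:** Hello!"}}'):
    print(chunk)  # prints "Hello!"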
g4f/models.py
CHANGED

@@ -10,7 +10,7 @@ class Model:
     class gpt_35_turbo:
         name: str = 'gpt-3.5-turbo'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.
+        best_provider: Provider.Provider = Provider.Wewordle
 
     class gpt_35_turbo_0613:
         name: str = 'gpt-3.5-turbo-0613'
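The models.py change points gpt-3.5-turbo's best_provider at the Wewordle provider. Below is a hedged sketch of how that attribute is typically consumed when no explicit provider is passed; the Provider/Model stubs and the pick_provider helper are hypothetical stand-ins for g4f's real dispatch code, kept minimal so the lookup itself is clear.

# Hypothetical stubs standing in for g4f's Provider and Model classes,
# just to illustrate how best_provider drives provider selection.
class Provider:
    class Wewordle:
        name = 'Wewordle'


class Model:
    class gpt_35_turbo:
        name: str = 'gpt-3.5-turbo'
        base_provider: str = 'openai'
        best_provider = Provider.Wewordle  # value set by this commit


def pick_provider(model_class, provider=None):
    # Use an explicitly requested provider, otherwise fall back to the
    # model's declared best_provider (now Wewordle for gpt-3.5-turbo).
    return provider if provider is not None else model_class.best_provider


print(pick_provider(Model.gpt_35_turbo).name)  # -> Wewordle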