Spaces:
Sleeping
Sleeping
:gem: [Feature] Add top_p and set 0.95 as the default
Browse files
- apis/chat_api.py +5 -0
- networks/message_streamer.py +2 -0
apis/chat_api.py
CHANGED
|
@@ -91,6 +91,10 @@ class ChatAPIApp:
|
|
| 91 |
default=0.5,
|
| 92 |
description="(float) Temperature",
|
| 93 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 94 |
max_tokens: Union[int, None] = Field(
|
| 95 |
default=-1,
|
| 96 |
description="(int) Max tokens",
|
|
@@ -115,6 +119,7 @@ class ChatAPIApp:
|
|
| 115 |
stream_response = streamer.chat_response(
|
| 116 |
prompt=composer.merged_str,
|
| 117 |
temperature=item.temperature,
|
|
|
|
| 118 |
max_new_tokens=item.max_tokens,
|
| 119 |
api_key=api_key,
|
| 120 |
use_cache=item.use_cache,
|
|
|
|
| 91 |
default=0.5,
|
| 92 |
description="(float) Temperature",
|
| 93 |
)
|
| 94 |
+
top_p: Union[float, None] = Field(
|
| 95 |
+
default=0.95,
|
| 96 |
+
description="(float) top p",
|
| 97 |
+
)
|
| 98 |
max_tokens: Union[int, None] = Field(
|
| 99 |
default=-1,
|
| 100 |
description="(int) Max tokens",
|
|
|
|
| 119 |
stream_response = streamer.chat_response(
|
| 120 |
prompt=composer.merged_str,
|
| 121 |
temperature=item.temperature,
|
| 122 |
+
top_p=item.top_p,
|
| 123 |
max_new_tokens=item.max_tokens,
|
| 124 |
api_key=api_key,
|
| 125 |
use_cache=item.use_cache,
|
networks/message_streamer.py
CHANGED
|
@@ -62,6 +62,7 @@ class MessageStreamer:
|
|
| 62 |
self,
|
| 63 |
prompt: str = None,
|
| 64 |
temperature: float = 0.5,
|
|
|
|
| 65 |
max_new_tokens: int = None,
|
| 66 |
api_key: str = None,
|
| 67 |
use_cache: bool = False,
|
|
@@ -111,6 +112,7 @@ class MessageStreamer:
|
|
| 111 |
"inputs": prompt,
|
| 112 |
"parameters": {
|
| 113 |
"temperature": temperature,
|
|
|
|
| 114 |
"max_new_tokens": max_new_tokens,
|
| 115 |
"return_full_text": False,
|
| 116 |
},
|
|
|
|
| 62 |
self,
|
| 63 |
prompt: str = None,
|
| 64 |
temperature: float = 0.5,
|
| 65 |
+
top_p: float = 0.95,
|
| 66 |
max_new_tokens: int = None,
|
| 67 |
api_key: str = None,
|
| 68 |
use_cache: bool = False,
|
|
|
|
| 112 |
"inputs": prompt,
|
| 113 |
"parameters": {
|
| 114 |
"temperature": temperature,
|
| 115 |
+
"top_p": top_p,
|
| 116 |
"max_new_tokens": max_new_tokens,
|
| 117 |
"return_full_text": False,
|
| 118 |
},
|