aresnow committed on
Commit
da866b5
1 Parent(s): a9bd2fc

modify max tokens

Files changed (1)
app.py +2 -2
app.py CHANGED
@@ -131,7 +131,7 @@ class GradioApp:
         max_token = gr.Slider(
             128,
             1024,
-            value=256,
+            value=128,
             step=1,
             label=self._locale("Max tokens"),
             info=self._locale("The maximum number of tokens to generate."),
@@ -462,7 +462,7 @@ async def launch_xinference():
         address=supervisor_address, supervisor_address=supervisor_address
     )
     api = AsyncSupervisorAPI(supervisor_address)
-    supported_models = ["orca", "chatglm2", "chatglm", "vicuna-v1.3"]
+    supported_models = ["chatglm2", "chatglm", "vicuna-v1.3", "orca"]
     for model in supported_models:
         await api.launch_model(str(uuid.uuid4()), model)
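For context, a minimal standalone sketch of the slider as configured after this commit, assuming Gradio's gr.Slider API; the self._locale helper is replaced with plain strings, and the surrounding Blocks/launch wiring is an assumption, not part of the original app.py:

import gradio as gr

with gr.Blocks() as demo:
    # Slider arguments mirror the diff: range 128-1024, default lowered from 256 to 128.
    max_token = gr.Slider(
        128,           # minimum
        1024,          # maximum
        value=128,     # new default after this commit
        step=1,
        label="Max tokens",
        info="The maximum number of tokens to generate.",
    )

if __name__ == "__main__":
    demo.launch()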