FareedKhan committed
Commit 4dc4a20
Parent(s): 6724a85

Change max sequence length default

Files changed (1)
app.py  +1 -1
app.py CHANGED
@@ -71,7 +71,7 @@ def setup_seed(seed):
  st.sidebar.title("Model Settings")
  st.sidebar.text("Higher context memory may reduce response quality over long conversations.")
  st.session_state.history_chat_num = st.sidebar.slider("Number of Historical Dialogues", 0, 6, 0, step=2)
- st.session_state.max_new_tokens = st.sidebar.slider("Max Sequence Length", 256, 8192, 8192, step=1)
+ st.session_state.max_new_tokens = st.sidebar.slider("Max Sequence Length", 256, 8192, 256, step=1)
  st.session_state.top_p = st.sidebar.slider("Top-P", 0.8, 0.99, 0.85, step=0.01)
  st.session_state.temperature = st.sidebar.slider("Temperature", 0.6, 1.2, 0.85, step=0.01)
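
For context, the slider value stored in `st.session_state.max_new_tokens` (whose default this commit lowers from 8192 to 256) is what the app's generation step would read when producing a reply. The snippet below is a minimal sketch of that downstream usage, assuming a Hugging Face-style `model.generate` interface; the helper name `generate_reply` and the exact call site are illustrative, not taken from this repo.

```python
# Minimal sketch (assumption): how the sidebar settings might feed generation.
# The actual call in app.py may differ; generate_reply is a hypothetical helper.
import streamlit as st

def generate_reply(model, tokenizer, prompt: str) -> str:
    inputs = tokenizer(prompt, return_tensors="pt")
    output_ids = model.generate(
        **inputs,
        max_new_tokens=st.session_state.max_new_tokens,  # slider default now 256
        top_p=st.session_state.top_p,
        temperature=st.session_state.temperature,
        do_sample=True,
    )
    # Decode only the tokens generated after the prompt.
    return tokenizer.decode(
        output_ids[0][inputs["input_ids"].shape[1]:],
        skip_special_tokens=True,
    )
```

With the lower default, a user gets shorter responses out of the box and must raise the slider explicitly to allow long generations up to the 8192-token cap.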