barunsaha committed
Commit 5868f47 · 1 Parent(s): da09d40

Disable max tokens for Gemini & Azure OpenAI

Files changed (1):
  1. helpers/llm_helper.py +2 -2
helpers/llm_helper.py CHANGED
@@ -157,7 +157,7 @@ def get_langchain_llm(
         return GoogleGenerativeAI(
             model=model,
             temperature=GlobalConfig.LLM_MODEL_TEMPERATURE,
-            max_tokens=max_new_tokens,
+            # max_tokens=max_new_tokens,
             timeout=None,
             max_retries=2,
             google_api_key=api_key,
@@ -182,7 +182,7 @@ def get_langchain_llm(
             api_version=azure_api_version,
             azure_endpoint=azure_endpoint_url,
             temperature=GlobalConfig.LLM_MODEL_TEMPERATURE,
-            max_tokens=max_new_tokens,
+            # max_tokens=max_new_tokens,
             timeout=None,
             max_retries=1,
             api_key=api_key,
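
After this change, neither client receives an explicit max_tokens, so output length falls back to each provider's default. A minimal, self-contained sketch of the resulting constructions is below; the import paths and all placeholder values (model name, API version, endpoint, keys) are assumptions for illustration only, while the keyword arguments mirror the diff above.

# Sketch under assumptions: imports and placeholder values are not taken from this repository.
from langchain_google_genai import GoogleGenerativeAI
from langchain_openai import AzureChatOpenAI

# Gemini client: no max_tokens argument, so the provider's default output limit applies.
gemini_llm = GoogleGenerativeAI(
    model='gemini-1.5-flash',   # placeholder model name
    temperature=0.2,            # stands in for GlobalConfig.LLM_MODEL_TEMPERATURE
    timeout=None,
    max_retries=2,
    google_api_key='...',       # placeholder API key
)

# Azure OpenAI client: likewise constructed without max_tokens.
azure_llm = AzureChatOpenAI(
    api_version='2024-02-01',                             # placeholder API version
    azure_endpoint='https://example.openai.azure.com/',   # placeholder endpoint
    temperature=0.2,
    timeout=None,
    max_retries=1,
    api_key='...',              # placeholder API key
)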