Zainajabroh committed on
Commit
bab571f
·
verified ·
1 Parent(s): 78741dc

Update Discussion_Buddy.py

Browse files
Files changed (1) hide show
  1. Discussion_Buddy.py +2 -2
Discussion_Buddy.py CHANGED
@@ -105,7 +105,7 @@ def cleaning_text(text, is_lower_case = False):
105
 
106
  #Step 1 to classify, summary and get NER from news
107
  def extract_article(url):
108
- model = genai.GenerativeModel('gemini-1.5-pro')
109
  extract = model.generate_content(f"Extract the content from this news article: {url}, make sure extract all news part. Extract based on the news language used, if the news using Indonesia language just extract with Indonesian language, and so on if the news using english language. Make sure minimum is 200 words and maximum is 250 words").text
110
  return extract
111
  def process_news_pipeline(news_text, user_language):
@@ -153,7 +153,7 @@ def process_user_response(response_type, response_content, user_language):
153
 
154
  #Step 3, make chatbot for discussion about the news
155
  #load model and memory, to save context while discuss with chatbot
156
- model = GoogleGenerativeAI(model="gemini-1.5-pro", temperature=0.1)
157
  memory = ConversationBufferMemory()
158
  conversation = ConversationChain(
159
  llm=model,
 
105
 
106
  #Step 1 to classify, summary and get NER from news
107
  def extract_article(url):
108
+ model = genai.GenerativeModel('gemini-1.5-flash')
109
  extract = model.generate_content(f"Extract the content from this news article: {url}, make sure extract all news part. Extract based on the news language used, if the news using Indonesia language just extract with Indonesian language, and so on if the news using english language. Make sure minimum is 200 words and maximum is 250 words").text
110
  return extract
111
  def process_news_pipeline(news_text, user_language):
 
153
 
154
  #Step 3, make chatbot for discussion about the news
155
  #load model and memory, to save context while discuss with chatbot
156
+ model = GoogleGenerativeAI(model="gemini-1.5-flash", temperature=0.1)
157
  memory = ConversationBufferMemory()
158
  conversation = ConversationChain(
159
  llm=model,