teddyllm committed on
Commit
5aeaf40
1 Parent(s): 27b349d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -4
app.py CHANGED
@@ -25,7 +25,7 @@ from examples import (
25
  )
26
 
27
  repo_id = "meta/llama-3.1-405b-instruct"
28
- llm_client = ChatNVIDIA(model=repo_id)
29
  end_sequence = "I hope that helps!"
30
 
31
  def generate_key_points(text):
@@ -178,8 +178,28 @@ def create_map_from_markers(dataframe):
178
 
179
  def run_display(text):
180
  current_output = ""
181
- for output in generate_key_points(text):
182
- current_output += output
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
183
  yield None, "```text\n" + current_output + "\n```"
184
  current_output = current_output.replace("</s>", "")
185
  dataframe, _ = parse_llm_output(current_output)
@@ -227,4 +247,4 @@ with gr.Blocks(
227
  )
228
 
229
  if __name__ == "__main__":
230
- demo.launch()
 
25
  )
26
 
27
  repo_id = "meta/llama-3.1-405b-instruct"
28
+ llm_client = ChatNVIDIA(model=repo_id, max_tokens=2000)
29
  end_sequence = "I hope that helps!"
30
 
31
  def generate_key_points(text):
 
178
 
179
  def run_display(text):
180
  current_output = ""
181
+ prompt = f"""
182
+ Please generate a set of key geographical points for the following description: {text}, as a json list of less than 10 dictionnaries with the following keys: 'name', 'description'.
183
+ ALWAYS precise the city and country in the 'name'. For instance do not only "name": "Notre Dame" as the name but "name": "Notre Dame, Paris, France".
184
+ Generally try to minimize the distance between locations. Always think of the transportation means that you want to use, and the timing: morning, afternoon, where to sleep.
185
+ Only generate two sections: 'Thought:' provides your rationale for generating the points, then you list the locations in 'Key points:'.
186
+ Then generate '{end_sequence}' to indicate the end of the response.
187
+
188
+ For instance:
189
+ Description: {description_sf}
190
+ Thought: {output_example_sf}
191
+ {end_sequence}
192
+
193
+ Description: {description_loire}
194
+ Thought: {output_example_loire}
195
+ {end_sequence}
196
+
197
+ Now begin. You can make the descriptions a bit more verbose than in the examples.
198
+
199
+ Description: {text}
200
+ Thought:"""
201
+ for output in llm_client.stream(prompt):
202
+ current_output += output.content
203
  yield None, "```text\n" + current_output + "\n```"
204
  current_output = current_output.replace("</s>", "")
205
  dataframe, _ = parse_llm_output(current_output)
 
247
  )
248
 
249
  if __name__ == "__main__":
250
+ demo.launch(debug=True)