Omnibus committed on
Commit
8752425
·
verified ·
1 Parent(s): 9a2fcb0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +48 -31
app.py CHANGED
@@ -230,7 +230,26 @@ def get_key(inp,data):
230
  print(e)
231
  print(key_box)
232
 
233
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
234
  def summarize(inp,history,data=None):
235
  json_box=[]
236
  if inp == "":
@@ -242,7 +261,17 @@ def summarize(inp,history,data=None):
242
  yield "",[(inp,"Working on it...")]
243
 
244
  if data != "Error" and data != "":
245
- print(inp)
 
 
 
 
 
 
 
 
 
 
246
  out = str(data)
247
  rl = len(out)
248
  print(f'rl:: {rl}')
@@ -251,45 +280,33 @@ def summarize(inp,history,data=None):
251
  if i == " " or i=="," or i=="\n" or i=="/" or i=="." or i=="<":
252
  c +=1
253
  print (f'c:: {c}')
254
-
255
- #json_out = compress_data(c,inp,task,out)
256
-
257
- #def compress_data(c,purpose, task, history):
258
- purpose=inp
259
- seed=random.randint(1,1000000000)
260
- print (c)
261
  divr=int(c)/MAX_DATA
262
  divi=int(divr)+1 if divr != int(divr) else int(divr)
263
  chunk = int(int(c)/divr)
264
  print(f'chunk:: {chunk}')
265
  print(f'divr:: {divr}')
266
  print (f'divi:: {divi}')
267
- #out=""
268
  s=0
269
  e=chunk
270
  print(f'e:: {e}')
271
  new_history=""
272
  task = f'Compile this data to fulfill the task: {task}, and complete the purpose: {purpose}\n'
 
 
 
273
  for z in range(divi):
274
  print(f's:e :: {s}:{e}')
275
  mes= f'Working on data chunk: {s}:{e}'
276
- hist = out[s:e]
277
- print(f'hist::\n{hist}')
278
- yield "", [(inp,f'{mes}\n{new_history}')]
279
- resp = run_gpt(
280
- COMPRESS_DATA_PROMPT,
281
- stop_tokens=[],
282
- max_tokens=2048,
283
- seed=seed,
284
- purpose=purpose,
285
- prefix_tog="normal",
286
- task=task,
287
- knowledge=new_history,
288
- history=hist,
289
- )
290
- new_history = resp
291
- print (resp)
292
- out+=resp
293
  e=e+chunk
294
  s=s+chunk
295
  #history = "preliminary result: {}\n".format(resp)
@@ -297,14 +314,14 @@ def summarize(inp,history,data=None):
297
  print ("final" + resp)
298
  out_hist = "result:\n{}".format(resp)
299
  #return history
300
- yield "", [(inp,out_hist)]
301
 
302
  out = str(out_hist)
303
- rawp = out
304
  else:
305
  rawp = "Provide a valid data source"
306
- history.append((inp,rawp))
307
- yield "", history
308
 
309
 
310
  def find_rss():
 
230
  print(e)
231
  print(key_box)
232
 
233
+ NEWS_REPORTER="""You are an Expert News Aggregator. Your duty is to compress all of the News Articles you are given into 10 or more individual articles that capture the full context of the current news. Compile your articles into JSON format which the user will load into an RSS reader for other users to read.
234
+ Add NEW DATA that you recieve to your CURRENT DATA by combining and reformatting when needed.
235
+ Output Format:
236
+ [
237
+ {
238
+ 'Title': 'title of the first article',
239
+ 'Description': 'description of the article',
240
+ 'Article': 'your custom written article',
241
+ 'Links': 'all source links that have contributed to the article'
242
+ },
243
+ {
244
+ 'Title': 'title of the second article',
245
+ 'Description': 'description of the article',
246
+ 'Article': 'your custom written article',
247
+ 'Links': 'all source links that have contributed to the article'
248
+ }
249
+ ]
250
+ News Articles:
251
+ {new_data}
252
+ """
253
  def summarize(inp,history,data=None):
254
  json_box=[]
255
  if inp == "":
 
261
  yield "",[(inp,"Working on it...")]
262
 
263
  if data != "Error" and data != "":
264
+ timestamp=datetime.datetime.now()
265
+ seed=random.randint(1,1000000000)
266
+ print(seed)
267
+ generate_kwargs = dict(
268
+ temperature=0.9,
269
+ max_new_tokens=10240,
270
+ top_p=0.95,
271
+ repetition_penalty=1.0,
272
+ do_sample=True,
273
+ seed=seed,
274
+ )
275
  out = str(data)
276
  rl = len(out)
277
  print(f'rl:: {rl}')
 
280
  if i == " " or i=="," or i=="\n" or i=="/" or i=="." or i=="<":
281
  c +=1
282
  print (f'c:: {c}')
 
 
 
 
 
 
 
283
  divr=int(c)/MAX_DATA
284
  divi=int(divr)+1 if divr != int(divr) else int(divr)
285
  chunk = int(int(c)/divr)
286
  print(f'chunk:: {chunk}')
287
  print(f'divr:: {divr}')
288
  print (f'divi:: {divi}')
 
289
  s=0
290
  e=chunk
291
  print(f'e:: {e}')
292
  new_history=""
293
  task = f'Compile this data to fulfill the task: {task}, and complete the purpose: {purpose}\n'
294
+ current_data=""
295
+ out_box
296
+ resp = ""
297
  for z in range(divi):
298
  print(f's:e :: {s}:{e}')
299
  mes= f'Working on data chunk: {s}:{e}'
300
+ new_data = out[s:e]
301
+ #yield "", [(inp,f'{mes}\n{new_history}')]
302
+
303
+ content = NEWS_REPORTER.format(new_data=new_data)
304
+ stream = client.text_generation(content, **generate_kwargs, stream=True, details=True, return_full_text=False)
305
+ for response in stream:
306
+ resp += response.token.text
307
+ yield "", [(inp,resp)]
308
+
309
+ #out+=resp
 
 
 
 
 
 
 
310
  e=e+chunk
311
  s=s+chunk
312
  #history = "preliminary result: {}\n".format(resp)
 
314
  print ("final" + resp)
315
  out_hist = "result:\n{}".format(resp)
316
  #return history
317
+ #yield "", [(inp,out_hist)]
318
 
319
  out = str(out_hist)
320
+ #rawp = out
321
  else:
322
  rawp = "Provide a valid data source"
323
+ history.append((inp,rawp))
324
+ return "", history
325
 
326
 
327
  def find_rss():