Omnibus committed · Commit b08b82d · verified · 1 Parent(s): 53c5f2a

Update app.py

Files changed (1)
  1. app.py +4 -152
app.py CHANGED
@@ -96,145 +96,11 @@ def run_gpt(
     return resp
 
 
-def compress_data(c,purpose, task, history):
-    seed=random.randint(1,1000000000)
-    print (c)
-    divr=int(c)/MAX_DATA
-    divi=int(divr)+1 if divr != int(divr) else int(divr)
-    chunk = int(int(c)/divr)
-    print(f'chunk:: {chunk}')
-    print(f'divr:: {divr}')
-    print (f'divi:: {divi}')
-    out = []
-    #out=""
-    s=0
-    e=chunk
-    print(f'e:: {e}')
-    new_history=""
-    task = f'Compile this data to fulfill the task: {task}, and complete the purpose: {purpose}\n'
-    for z in range(divi):
-        print(f's:e :: {s}:{e}')
-
-        hist = history[s:e]
-        print(f'hist::\n{hist}')
-        resp = run_gpt(
-            COMPRESS_DATA_PROMPT,
-            stop_tokens=["observation:", "task:", "action:", "thought:"],
-            max_tokens=2048,
-            seed=seed,
-            purpose=purpose,
-            prefix_tog="normal",
-            task=task,
-            knowledge=new_history,
-            history=hist,
-        ).strip("\n")
-        new_history = resp
-        print (resp)
-        out+=resp
-        e=e+chunk
-        s=s+chunk
-    '''
-    resp = run_gpt(
-        COMPRESS_DATA_PROMPT,
-        stop_tokens=["observation:", "task:", "action:", "thought:"],
-        max_tokens=2048,
-        seed=seed,
-        purpose=purpose,
-        task=task,
-        knowledge=new_history,
-        history=result,
-    )
-    '''
-    print ("final" + resp)
-    history = "result: {}\n".format(resp)
-    return history
-
-
-
-def get_records(inp,data):
-    key_box=[]
-    seed=random.randint(1,1000000000)
-    print(inp)
-    out = str(data)
-    rl = len(out)
-    print(f'rl:: {rl}')
-    c=1
-    for i in str(out):
-        if i == " " or i=="," or i=="\n" or i=="/" or i=="." or i=="<":
-            c +=1
-    print (f'c:: {c}')
-    divr=int(c)/MAX_DATA
-    divi=int(divr)+1 if divr != int(divr) else int(divr)
-    chunk = int(int(c)/divr)
-    print(f'chunk:: {chunk}')
-    print(f'divr:: {divr}')
-    print (f'divi:: {divi}')
-    s=0
-    e=chunk
-    print(f'e:: {e}')
-    new_history=""
-    #task = f'Compile this data to fulfill the task: {task}, and complete the purpose: {purpose}\n'
-    for z in range(divi):
-        print(f's:e :: {s}:{e}')
-
-        hist = out[s:e]
-        print(f'hist::\n{hist}')
-        resp = run_gpt(
-            GET_KEYWORD,
-            stop_tokens=[],
-            max_tokens=2048,
-            seed=seed,
-            purpose=inp,
-            prefix_tog="alternate",
-            task=inp,
-            knowledge=new_history,
-            history=hist,
-        ).strip("\n")
-        new_history = resp
-        print (f'resp {z}::\n {resp}')
-        #out+=resp
-        e=e+chunk
-        s=s+chunk
-        yield "", [(inp,new_history)]
-
-
-def get_key(inp,data):
-    key_box=[]
-    seed=random.randint(1,1000000000)
-    key_w = run_gpt(
-        GET_KEYWORD,
-        stop_tokens=[],
-        max_tokens=56,
-        seed=seed,
-        purpose=inp,
-        prefix_tog="normal",
-        task=inp,
-    ).split("<")[0]
-    print(f'key_w::{key_w}')
-    if " " in key_w:
-        key_w=key_w.split(" ")[-1]
-    for i,ba in enumerate(data):
-        each_key=data[i].keys()
-        print(each_key)
-        for z,zz in enumerate(list(each_key)[0]):
-            #for f,ff in enumerate(data[i][zz]):
-            ea = data[i][list(each_key)[0]][z]
-            try:
-                if ea['title'] and key_w in ea['title']:
-                    key_box.append(ea)
-                elif ea['description'] and key_w in ea['description']:
-                    key_box.append(ea)
-                elif ea['link'] and key_w in ea['link']:
-                    key_box.append(ea)
-            except Exception as e:
-                print(e)
-    print(key_box)
 
 NEWS_REPORTER="""You are an Expert News Aggregator. Your duty is to read and compress all of the News Articles you are given into 10 or more individual articles that capture the full context of the current news. Compile your articles into JSON format which the user will load into an RSS reader for other users to read.
-Output Format Example:
+*** Output Format Example:
 {output_format}
-
-News Articles:
+*** News Articles:
 {new_data}
 """
 output_format="""{"title": "title of the first article","description": "description of the article","article": "your custom written article","links": "all source links that have contributed to the article"},{"title": "title of the second article","description": "description of the article","article": "your custom written article","links": "all source links that have contributed to the article"}"""
@@ -285,17 +151,13 @@ def summarize(inp,history,data=None):
             new_data = out[s:e]
             #yield "", [(inp,f'{mes}\n{new_history}')]
 
-            content = NEWS_REPORTER.format(output_format=output_format,new_data=str(new_data.replace("{","").replace("}","")))
+            #content = NEWS_REPORTER.format(output_format=output_format,new_data=str(new_data.replace("{","").replace("}","")))
+            content = NEWS_REPORTER.format(output_format=output_format,new_data=str(new_data))
             stream = client.text_generation(content, **generate_kwargs, stream=True, details=True, return_full_text=False)
             for response in stream:
                 resp += response.token.text
                 yield "", [(inp,resp)],None
 
-            #for line in resp.split("\n"):
-                #if 'title:' in line.lower():
-
-
-            #out_json=""
             out_json=resp.replace("\n","").replace("```","")
             out_box.append(out_json.strip("</s>"))
             #out_box=eval(out_box)
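The functional change in this hunk is that new_data is no longer stripped of braces before substitution into NEWS_REPORTER. That is safe because str.format only parses placeholders in the template string itself; substituted values are inserted verbatim, so braces inside the scraped articles cannot raise a KeyError. A quick illustration:

# str.format scans only the *template* for {placeholders}; values are
# inserted verbatim, so braces inside new_data are harmless.
template = "News Articles:\n{new_data}"
print(template.format(new_data='{"title": "an article", "links": "..."}'))
# prints the JSON-looking value unchanged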
@@ -304,16 +166,6 @@ def summarize(inp,history,data=None):
 
             e=e+chunk
             s=s+chunk
-
-            #history = "preliminary result: {}\n".format(resp)
-            #yield "", (inp,f'{mes}\n{history}')
-            #print ("final" + resp)
-            #out_hist = "result:\n{}".format(resp)
-            #return history
-            #yield "", [(inp,out_hist)]
-
-            #out = str(out_hist)
-            #rawp = out
     else:
         rawp = "Provide a valid data source"
     history.append((inp,rawp))
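For context, the streaming loop kept by these hunks follows the huggingface_hub InferenceClient pattern: with stream=True and details=True, text_generation yields per-token events whose text is accumulated into resp. A sketch under the assumption that client and generate_kwargs are configured as elsewhere in app.py; the model id and sampling values below are placeholders:

from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")  # placeholder model id
generate_kwargs = dict(temperature=0.9, max_new_tokens=2048, seed=1337)  # assumed values

resp = ""
stream = client.text_generation(
    "Summarize these articles ...", **generate_kwargs,
    stream=True, details=True, return_full_text=False,
)
for response in stream:
    resp += response.token.text  # each streamed event carries one generated token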
 