dwarkesh committed
Commit d34ceb2 · Parent: 91be0ad

generates previews

Files changed (3)
  1. app.py +9 -5
  2. prompts/previews.txt +1 -1
  3. utils/content_generator.py +2 -2
app.py CHANGED
@@ -29,10 +29,11 @@ class TranscriptProcessor:
 
             # Define content generation requests
             requests = [
+                ContentRequest("previews", max_tokens=8192),
                 ContentRequest("clips", max_tokens=8192),
                 ContentRequest("description"),
-                ContentRequest("timestamps", temperature=0.4),
-                ContentRequest("titles_and_thumbnails", temperature=0.7),
+                ContentRequest("timestamps"),
+                ContentRequest("titles_and_thumbnails"),
             ]
 
             # Generate all content concurrently
@@ -42,12 +43,12 @@ class TranscriptProcessor:
             return tuple(results)
 
         except Exception as e:
-            return (f"Error processing input: {str(e)}",) * 4
+            return (f"Error processing input: {str(e)}",) * 5
 
     def update_prompts(self, *values) -> str:
         """Update the current session's prompts."""
         self.generator.current_prompts.update(zip(
-            ["clips", "description", "timestamps", "titles_and_thumbnails"],
+            ["previews", "clips", "description", "timestamps", "titles_and_thumbnails"],
             values
         ))
         return "Prompts updated for this session!"
@@ -63,7 +64,7 @@ def create_interface():
         submit_btn = gr.Button("Generate Content")
         outputs = [
             gr.Textbox(label=label, lines=10, interactive=False)
-            for label in ["Twitter Clips", "Twitter Description", "Timestamps", "Title & Thumbnail Suggestions"]
+            for label in ["Preview Clips", "Twitter Clips", "Twitter Description", "Timestamps", "Title & Thumbnail Suggestions"]
         ]
 
         async def process_wrapper(text):
@@ -83,6 +84,9 @@ def create_interface():
         )
 
         prompt_inputs = [
+            gr.Textbox(
+                label="Preview Clips Prompt", lines=10, value=processor.generator.current_prompts["previews"]
+            ),
             gr.Textbox(
                 label="Clips Prompt", lines=10, value=processor.generator.current_prompts["clips"]
             ),
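Note: the diff only shows the request list growing from four entries to five; the code that actually fans the requests out and unpacks the results is untouched by this commit. As a rough, self-contained sketch of the concurrency pattern app.py relies on, the following runs a list of ContentRequest objects through asyncio.gather. The helper names (generate_one, generate_all) and the dummy generation step are assumptions for illustration, not the repository's actual implementation.

# Hedged sketch, not the repo's code: fan out ContentRequest objects concurrently.
import asyncio
from dataclasses import dataclass

@dataclass
class ContentRequest:
    prompt_key: str
    max_tokens: int = 2000
    temperature: float = 1.0

async def generate_one(request: ContentRequest, transcript: str) -> str:
    # Placeholder for the real per-request call into the Anthropic client.
    await asyncio.sleep(0)
    return f"[{request.prompt_key}] generated from {len(transcript)} chars"

async def generate_all(requests: list[ContentRequest], transcript: str) -> tuple[str, ...]:
    # asyncio.gather preserves input order, so the resulting tuple lines up with
    # the five output textboxes ("Preview Clips" comes first after this commit).
    results = await asyncio.gather(*(generate_one(r, transcript) for r in requests))
    return tuple(results)

if __name__ == "__main__":
    requests = [
        ContentRequest("previews", max_tokens=8192),
        ContentRequest("clips", max_tokens=8192),
        ContentRequest("description"),
        ContentRequest("timestamps"),
        ContentRequest("titles_and_thumbnails"),
    ]
    print(asyncio.run(generate_all(requests, "transcript text...")))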
prompts/previews.txt CHANGED
@@ -6,7 +6,7 @@ You are a podcast producer tasked with selecting 5-10 short, engaging clips for
 - Represent interesting moments, revelations, or powerful statements
 - Work well together to give a taste of the episode's best content
 
-Please listen to the audio and suggest 5-10 clips that would make great preview material. For each suggestion:
+Please suggest 5-10 clips that would make great preview material. For each suggestion:
 1. Note the timestamp where the clip occurs
 2. Quote the relevant dialogue
 3. Briefly explain why this would make a good preview clip
utils/content_generator.py CHANGED
@@ -13,7 +13,7 @@ client = anthropic.Anthropic()
 class ContentRequest:
     prompt_key: str
     max_tokens: int = 2000
-    temperature: float = 0.6
+    temperature: float = 1.0
 
 class ContentGenerator:
     def __init__(self):
@@ -23,7 +23,7 @@ class ContentGenerator:
         """Load default prompts from files."""
         return {
             key: Path(f"prompts/{key}.txt").read_text()
-            for key in ["clips", "description", "timestamps", "titles_and_thumbnails"]
+            for key in ["previews", "clips", "description", "timestamps", "titles_and_thumbnails"]
         }
 
     def _load_examples(self, filename: str, columns: List[str]) -> str:
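For context on where these defaults land: ContentRequest now defaults to temperature 1.0, and the prompt map gains a "previews" key read from prompts/previews.txt. Below is a hedged sketch of how such a request could be turned into a call against the Anthropic client this module already constructs (client = anthropic.Anthropic()); the model name and the way the prompt and transcript are combined are assumptions, not taken from the diff.

# Hedged sketch: mapping a ContentRequest's fields onto an Anthropic Messages call.
from pathlib import Path
import anthropic

client = anthropic.Anthropic()

def run_request(prompt_key: str, transcript: str,
                max_tokens: int = 2000, temperature: float = 1.0) -> str:
    # Prompts live in prompts/<key>.txt, as the _load_prompts diff shows.
    system_prompt = Path(f"prompts/{prompt_key}.txt").read_text()
    message = client.messages.create(
        model="claude-3-5-sonnet-20241022",  # assumed model, not specified in the diff
        max_tokens=max_tokens,
        temperature=temperature,
        system=system_prompt,
        messages=[{"role": "user", "content": transcript}],
    )
    return message.content[0].text

# Example usage: the new "previews" prompt with the larger token budget from app.py.
# preview_text = run_request("previews", transcript, max_tokens=8192)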