Deepak Sahu committed on
Commit
7519558
·
1 Parent(s): b1228d0

test faster response workaround 2

Browse files
Files changed (1) hide show
  1. z_hypothetical_summary.py +5 -3
z_hypothetical_summary.py CHANGED
@@ -8,8 +8,11 @@ from transformers import pipeline, set_seed
8
  set_seed(42)
9
  TRAINED_CASUAL_MODEL = "LunaticMaestro/gpt2-book-summary-generator"
10
 
 
11
 
12
- generator_model = pipeline('text-generation', model=TRAINED_CASUAL_MODEL)
 
 
13
 
14
 
15
  def generate_summaries(book_title: str, genre: Optional[str] = None, n_samples=2, top_k = 50, top_p = 0.85, model=None) -> list[str]:
@@ -27,11 +30,10 @@ def generate_summaries(book_title: str, genre: Optional[str] = None, n_samples=2
27
  '''
28
  global generator_model
29
 
30
- generator_model
31
  if model:
32
  generator_model = model
33
  else:
34
- generator_model = generator_model
35
 
36
  # basic prompt very similary to one used in fine-tuning
37
  prompt = f'''Book Title: {book_title}
 
8
  set_seed(42)
9
  TRAINED_CASUAL_MODEL = "LunaticMaestro/gpt2-book-summary-generator"
10
 
11
+ generator_model = None
12
 
13
+ def load_model():
14
+ global generator_model
15
+ generator_model = pipeline('text-generation', model=TRAINED_CASUAL_MODEL)
16
 
17
 
18
  def generate_summaries(book_title: str, genre: Optional[str] = None, n_samples=2, top_k = 50, top_p = 0.85, model=None) -> list[str]:
 
30
  '''
31
  global generator_model
32
 
 
33
  if model:
34
  generator_model = model
35
  else:
36
+ generator_model = generator_model if generator_model is not None else load_model()
37
 
38
  # basic prompt very similary to one used in fine-tuning
39
  prompt = f'''Book Title: {book_title}