LymphSteamer committed (verified)
Commit 745907c · 1 Parent(s): 5d50408

Update app.py

Files changed (1): app.py +2 -2
app.py CHANGED
@@ -1,7 +1,7 @@
 import gradio as gr
 from transformers import AutoTokenizer,AutoModelForCausalLM,BitsAndBytesConfig
 import torch
-model_id = "rinna/japanese-gpt2-medium"
+model_id = "rinna/japanese-gpt2-small"
 tokenizer = AutoTokenizer.from_pretrained(model_id,trust_remote_code=True)
 model = AutoModelForCausalLM.from_pretrained(
 model_id,
@@ -25,7 +25,7 @@ def haiku_generate():
 pad_token_id=tokenizer.pad_token_id,
 do_sample=True,
 repetition_penalty=1.2,
-temperature=1.0,
+temperature=0.9,
 top_p=0.9
 )
 generated_tokens = outputs[0][prompt_len:]
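
For context, a minimal sketch of how the surrounding app.py could fit together. Only the lines visible in the two hunks above come from the commit; the prompt text, max_new_tokens value, the 8-bit BitsAndBytesConfig settings, the device placement, and the Gradio wiring are illustrative assumptions, not part of the diff.

import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
import torch

model_id = "rinna/japanese-gpt2-small"

tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    # Assumption: the BitsAndBytesConfig import suggests quantized loading;
    # the exact settings are not visible in the diff.
    quantization_config=BitsAndBytesConfig(load_in_8bit=True),
    device_map="auto",
)

def haiku_generate():
    # Assumption: prompt wording and max_new_tokens are not shown in the diff.
    prompt = "俳句:"
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    prompt_len = inputs["input_ids"].shape[1]
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=32,
            pad_token_id=tokenizer.pad_token_id,
            do_sample=True,
            repetition_penalty=1.2,
            temperature=0.9,  # lowered from 1.0 in this commit
            top_p=0.9,
        )
    # Drop the prompt tokens so only the newly generated text is decoded.
    generated_tokens = outputs[0][prompt_len:]
    return tokenizer.decode(generated_tokens, skip_special_tokens=True)

demo = gr.Interface(fn=haiku_generate, inputs=None, outputs="text")
demo.launch()

Lowering temperature from 1.0 to 0.9 while keeping top_p=0.9 and repetition_penalty=1.2 makes sampling slightly more conservative, and the switch from rinna/japanese-gpt2-medium to the smaller rinna/japanese-gpt2-small checkpoint reduces memory use and load time.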