Chris4K committed on
Commit e51e935
1 Parent(s): ef2c1bc

Update app.py

Files changed (1)
  1. app.py +33 -6
app.py CHANGED
@@ -17,6 +17,33 @@ from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration
 from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration
 
 
+###
+# Definition of different purpose prompts
+# https://huggingface.co/spaces/Chris4K/rlhf-arena/edit/main/app.py
+####
+def prompt_human_instruct(system_msg, history):
+    return system_msg.strip() + "\n" + \
+        "\n".join(["\n".join(["###Human: " + item[0], "###Assistant: " + item[1]])
+                   for item in history])
+
+
+def prompt_instruct(system_msg, history):
+    return system_msg.strip() + "\n" + \
+        "\n".join(["\n".join(["### Instruction: " + item[0], "### Response: " + item[1]])
+                   for item in history])
+
+
+def prompt_chat(system_msg, history):
+    return system_msg.strip() + "\n" + \
+        "\n".join(["\n".join(["USER: " + item[0], "ASSISTANT: " + item[1]])
+                   for item in history])
+
+
+def prompt_roleplay(system_msg, history):
+    return "<|system|>" + system_msg.strip() + "\n" + \
+        "\n".join(["\n".join(["<|user|>" + item[0], "<|model|>" + item[1]])
+                   for item in history])
+
 
 ####
 ## Sentiment models
@@ -45,17 +72,17 @@ chat_model_facebook_blenderbot_400M_distill = "facebook/blenderbot-400M-distill"
 chat_model_HenryJJ_vincua_13b = "HenryJJ/vincua-13b"
 
 # https://colab.research.google.com/drive/1hrS6_g14EcOD4ezwSGlGX2zxJegX5uNX#scrollTo=NUwUR9U7qkld
-llm_hf = HuggingFaceHub(
-    repo_id=chat_model_HenryJJ_vincua_13b,
-    model_kwargs={"temperature": 0.9}
-)
+#llm_hf = HuggingFaceHub(
+#    repo_id=chat_model_HenryJJ_vincua_13b,
+#    model_kwargs={"temperature": 0.9}
+#)
 
 
 
 text = "Why did the chicken cross the road?"
 
-output_question_1 = llm_hf(text)
-print(output_question_1)
+#output_question_1 = llm_hf(text)
+#print(output_question_1)
 
 
 
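
For reference, a minimal usage sketch of the prompt_* helpers added in this commit. The system_msg and history values below are illustrative placeholders, not part of app.py; history is assumed to be a list of (user_message, assistant_message) string pairs, which is the shape the helpers index into.

# Illustrative only: assumes the prompt_* helpers defined in app.py above are in scope.
system_msg = "You are a helpful assistant."
history = [
    ("Why did the chicken cross the road?", "To get to the other side."),
    ("Is that a joke?", "Yes, a classic one."),
]

print(prompt_chat(system_msg, history))
# You are a helpful assistant.
# USER: Why did the chicken cross the road?
# ASSISTANT: To get to the other side.
# USER: Is that a joke?
# ASSISTANT: Yes, a classic one.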