qdai committed
Commit 19d552f · verified · 1 parent: 16bb6d1

upload all related files to the app

Files changed (3)
  1. app.py +41 -0
  2. call_api.py +63 -0
  3. utils.py +22 -0
app.py ADDED
@@ -0,0 +1,41 @@
+import gradio as gr
+
+from utils import format_as_chat
+from call_api import generate_output
+
+
+def translate(sentence, history, target_language):
+    prompt = f"Translate this sentence into {target_language}: '{sentence}'. Please output only the translated sentence in {target_language}!"
+    chat_format = format_as_chat(prompt, history)
+    # print(chat_format)
+    payload = {
+        "inputs": chat_format,
+        "parameters": {
+            "do_sample": False,
+            "max_new_tokens": 400
+        }
+    }
+    # print(payload)
+    response = generate_output(payload)
+    output = response['generated_text']
+    # The endpoint echoes the whole chat template; keep only the final assistant reply
+    parts = output.split('assistant\n\n')
+    return parts[-1].strip()
+
+
+# res = translate("Awesome, now I can focus on my career without repetition.", [], 'Chinese')
+# print(f"Translated result: {res}")
+
+with gr.Blocks() as demo:
+    system_prompt = gr.Textbox(value="German", label="Target Language")
+
+    gr.ChatInterface(
+        translate,
+        additional_inputs=[system_prompt],
+        examples=[
+            ["Today is Friday!", "German"], ["Let's have fun.", "Chinese"], ["See you tomorrow.", "Arabic"]],
+        description="Enter an English sentence, choose a target language, and I will translate it into that language for you.",
+        title="Llama 3 8B Instruct: Machine Translation from English into any other language."
+    )
+
+demo.launch(share=True)
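
For context, the end-to-end call that the Gradio callback performs can also be exercised directly. Below is a minimal sketch, assuming the inference endpoint wired up in call_api.py is reachable; the sample sentence and target language are illustrative only:

    from utils import format_as_chat
    from call_api import generate_output

    # Build the same prompt and payload that translate() assembles for a first message (empty history)
    prompt = "Translate this sentence into Arabic: 'See you tomorrow.'. Please output only the translated sentence in Arabic!"
    payload = {
        "inputs": format_as_chat(prompt, []),
        "parameters": {"do_sample": False, "max_new_tokens": 400},
    }
    response = generate_output(payload)
    # Keep only the text after the last assistant header, as app.py does
    print(response["generated_text"].split("assistant\n\n")[-1].strip())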
call_api.py ADDED
@@ -0,0 +1,63 @@
+import requests
+
+
+# Call the Hugging Face Inference Endpoint API using Python
+def generate_output(payload):
+
+    # Send the request to the hosted text-generation endpoint
+    response = requests.post('https://uf9t072wj5ki2ho4.eu-west-1.aws.endpoints.huggingface.cloud/generate', json=payload)
+
+    # Parse the JSON response
+    data = response.json()
+
+    return data
+
+
+
+
+
+# Example payloads for the endpoint:
+#
+# payload1 = {
+#     "inputs": "Howdy!",
+#     "parameters": {
+#         "do_sample": False,
+#         "max_new_tokens": 40
+#     }
+# }
+#
+# output1 = generate_output(payload1)
+# print(f"output1: {output1}")
+#
+# formatted_input = (
+#     "<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n\nHowdy!<|eot_id|>"
+# )
+# payload2 = {
+#     "inputs": formatted_input,
+#     "parameters": {
+#         "do_sample": False,
+#         "max_new_tokens": 40
+#     }
+# }
+#
+#
+# output2 = generate_output(payload2)
+# print(f"output2: {output2}")
+#
+# multi_turn_input = "<|begin_of_text|>" \
+#     "<|start_header_id|>user<|end_header_id|>\n\nHowdy!<|eot_id|>" \
+#     "<|start_header_id|>assistant<|end_header_id|>\n\nHowdy back atcha! What brings you to these here parts?<|eot_id|>" \
+#     "<|start_header_id|>user<|end_header_id|>\n\nMy assignments!<|eot_id|>"
+#
+# payload3 = {
+#     "inputs": multi_turn_input,
+#     "parameters": {
+#         "do_sample": False,
+#         "max_new_tokens": 40
+#     }
+# }
+#
+#
+# output3 = generate_output(payload3)
+# print(f"output3: {output3}")
+#
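
As an aside, a slightly more defensive version of this call is sketched below; the timeout, the raise_for_status() check, and the generate_output_safe name are illustrative additions and not part of the committed code:

    import requests

    ENDPOINT = 'https://uf9t072wj5ki2ho4.eu-west-1.aws.endpoints.huggingface.cloud/generate'

    def generate_output_safe(payload, timeout=30):
        # Hypothetical wrapper: fail fast on network stalls and surface HTTP errors from the endpoint
        response = requests.post(ENDPOINT, json=payload, timeout=timeout)
        response.raise_for_status()
        return response.json()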
utils.py ADDED
@@ -0,0 +1,22 @@
+from typing import List
+
+
+def format_as_chat(message: str, history: List[List[str]]) -> str:
+    """
+    Given a message and a history of previous messages, return a string that formats the conversation as a chat.
+    Uses the prompt format expected by Meta Llama 3 Instruct.
+
+    :param message: A string containing the user's most recent message
+    :param history: A list of lists of previous messages, where each sublist is a conversation turn:
+                    [[user_message1, assistant_reply1], [user_message2, assistant_reply2], ...]
+    """
+    chat_format = "<|begin_of_text|>"
+    if len(history) > 0:
+        for turn in history:
+            user_message, assistant_message = turn
+            chat_format += f"<|start_header_id|>user<|end_header_id|>\n\n{user_message}<|eot_id|>"
+            chat_format += f"<|start_header_id|>assistant<|end_header_id|>\n\n{assistant_message}<|eot_id|>"
+
+    # Append the most recent user message
+    chat_format += f"<|start_header_id|>user<|end_header_id|>\n\n{message}<|eot_id|>"
+    return chat_format
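
For illustration, this is the string format_as_chat builds for one previous turn plus a new message; the messages are taken from the commented examples in call_api.py, and the result is a single concatenated string, wrapped here only for readability:

    from utils import format_as_chat

    history = [["Howdy!", "Howdy back atcha! What brings you to these here parts?"]]
    print(format_as_chat("My assignments!", history))
    # -> "<|begin_of_text|>"
    #    "<|start_header_id|>user<|end_header_id|>\n\nHowdy!<|eot_id|>"
    #    "<|start_header_id|>assistant<|end_header_id|>\n\nHowdy back atcha! What brings you to these here parts?<|eot_id|>"
    #    "<|start_header_id|>user<|end_header_id|>\n\nMy assignments!<|eot_id|>"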