File size: 1,661 Bytes
d206d07
5d230ab
 
2400f2a
d206d07
 
 
 
 
 
 
 
 
2400f2a
 
 
 
 
 
d206d07
2400f2a
 
 
 
 
 
 
d206d07
43d9484
953d185
 
 
 
 
 
 
 
 
 
d206d07
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
import os

import openai
import gradio as gr

# SECURITY: an API key was committed here in plain text — it must be treated
# as compromised and rotated. Prefer the environment; the hard-coded values
# remain only as a backward-compatible fallback for existing deployments.
openai.api_key = os.environ.get(
    "OPENAI_API_KEY", "sk-R3HlMsYBk0NpAlLu2aA4B19054Ea4884A2Cf93D25662243d"
)
openai.api_base = os.environ.get("OPENAI_API_BASE", "https://apai.zyai.online/v1")

def predict(message, history):
    """Gradio chat handler: forward the conversation to the OpenAI Chat API.

    Args:
        message: The latest user message (str).
        history: List of (user_message, assistant_message) pairs, as
            supplied by Gradio's ChatInterface.

    Yields:
        The assistant's complete reply as a single string (non-streaming;
        yielded once so Gradio treats the handler as a generator).
    """
    # Flatten Gradio's (user, assistant) pair history into the
    # OpenAI chat-completion message format, then append the new message.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",  # chat model name
        messages=history_openai_format,
        temperature=1,  # in [0, 2]; higher values make replies more random
        max_tokens=600,  # cap on reply length, in tokens
        top_p=1,
        frequency_penalty=0,  # in [-2, 2]; higher discourages verbatim repetition
        presence_penalty=0,  # in [-2, 2]; higher encourages new topics
    )
    yield response.choices[0]["message"]["content"]

gr.ChatInterface(predict).queue().launch()