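# Chainlit app: streams chat completions from the OpenAI API using a fixed
# system prompt and a templated user prompt.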
import os

from openai import AsyncOpenAI

import chainlit as cl
from chainlit.prompt import Prompt, PromptMessage
from chainlit.playground.providers import ChatOpenAI

from dotenv import load_dotenv

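# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file.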
load_dotenv()


system_template = """You are a helpful assistant who focuses on communicating clearly and concisely, making sure to pay careful attention to the user's input and responding appropriately. You specialize in educating new developers on the basics of programming in a way that is easy to understand and follow. Your audience consists of non-technical students who are interested in starting careers as developers. Rather than trying to provide all possible dertails of the user's question, you focus on the essentials and encourage the user to ask follow-up questions if they need more information and to clarify their thinking. |
|
""" |
|
|
|
user_template = """{input} |
|
Think through your response step by step. |
|
""" |
|
|
|
|
|
@cl.on_chat_start
async def start_chat():
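    # Generation settings used for every completion in this chat session.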
    settings = {
        "model": "gpt-4o-mini",
        "temperature": 0,
        "max_tokens": 1000,
        "top_p": 1,
        "frequency_penalty": 0,
        "presence_penalty": 0,
    }

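    # Store the settings in the user session so the message handler can reuse them.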
    cl.user_session.set("settings", settings)


@cl.on_message
async def main(message: cl.Message):
    settings = cl.user_session.get("settings")

    client = AsyncOpenAI()

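    # Log the incoming user message to the server console for debugging.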
    print(message.content)

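    # Build a Chainlit Prompt that records the templates, the formatted
    # messages, and the settings used to generate this response.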
    prompt = Prompt(
        provider=ChatOpenAI.id,
        messages=[
            PromptMessage(
                role="system",
                template=system_template,
                formatted=system_template,
            ),
            PromptMessage(
                role="user",
                template=user_template,
                formatted=user_template.format(input=message.content),
            ),
        ],
        inputs={"input": message.content},
        settings=settings,
    )

    print([m.to_openai() for m in prompt.messages])

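    # Prepare an empty Chainlit message to stream the response into.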
    msg = cl.Message(content="")

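    # Request a streaming completion and forward each token to the UI as it arrives.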
    async for stream_resp in await client.chat.completions.create(
        messages=[m.to_openai() for m in prompt.messages], stream=True, **settings
    ):
        token = stream_resp.choices[0].delta.content
        if not token:
            token = ""
        await msg.stream_token(token)

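    # Record the full completion on the prompt and attach the prompt to the
    # message before sending, so the response stays linked to what produced it.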
    prompt.completion = msg.content
    msg.prompt = prompt

    await msg.send()