import OpenAI from "openai";
import { OpenAIStream, StreamingTextResponse } from "ai";
import { createSearchApi } from "@/app/tools/search";
import { createOddsApi } from "@/app/tools/odds";
import { createSportsResultsApi } from "@/app/tools/scores";
import { createCoinMarketCapApi } from "@/app/tools/coin";

// Each tool factory returns a tuple; only the second element (the
// function-calling schema) is needed here.
const [, serpApiSchema] = createSearchApi({ apiKey: process.env.SERP_API_KEY || '' });
const [, sportsApiResultsSchema] = createSportsResultsApi({ apiKey: process.env.SERP_API_KEY || '' });
const [, oddsApiSchema] = createOddsApi({ apiKey: process.env.ODDS_API_KEY || '' });
const [, coinMarketCapApiSchema] = createCoinMarketCapApi({ apiKey: process.env.COINMARKETCAP_API_KEY || '' });

// Function schemas exposed to the model for function calling.
const functions: any[] = [
  serpApiSchema,
  oddsApiSchema,
  sportsApiResultsSchema,
  coinMarketCapApiSchema
];
export async function POST(req: Request) {
  const openai = new OpenAI({
    apiKey: process.env.OPENAI_API_KEY
  });

  const { messages, function_call } = await req.json();

  // Request a streamed completion, letting the model choose among the
  // registered function schemas (or honor an explicit function_call).
  const response = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo',
    stream: true,
    messages,
    functions,
    function_call
  });

  // Pipe the OpenAI stream back to the client as a streaming text response.
  const stream = OpenAIStream(response);
  return new StreamingTextResponse(stream);
}
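
// Usage sketch (assumption, not part of this route): a client component using
// the `ai` package's useChat hook can stream responses from this handler.
// The route path "/api/chat" and the component name are hypothetical.
//
//   'use client';
//   import { useChat } from 'ai/react';
//
//   export function Chat() {
//     const { messages, input, handleInputChange, handleSubmit } = useChat({
//       api: '/api/chat',
//     });
//     return (
//       <form onSubmit={handleSubmit}>
//         {messages.map((m) => (
//           <p key={m.id}>{m.role}: {m.content}</p>
//         ))}
//         <input value={input} onChange={handleInputChange} />
//       </form>
//     );
//   }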