Update autogen.ts

David Corbitt
2023-07-03 17:35:29 -07:00
parent c2c512d751
commit 9a6cb8dc95
2 changed files with 61 additions and 10 deletions


@@ -1,4 +1,5 @@
 import { type CreateChatCompletionRequest } from "openai";
+import { type CompletionCreateParams } from "openai/resources/chat";
 import { prisma } from "../db";
 import { openai } from "../utils/openai";
 import { pick } from "lodash";
@@ -62,7 +63,7 @@ export const autogenerateScenarioValues = async (
   if (!experiment || !(variables?.length > 0) || !prompt) return {};
-  const messages: CreateChatCompletionRequest["messages"] = [
+  const messages: CompletionCreateParams.CreateChatCompletionRequestNonStreaming["messages"] = [
     {
       role: "system",
       content:
@@ -90,7 +91,6 @@ export const autogenerateScenarioValues = async (
     .forEach((vals) => {
       messages.push({
         role: "assistant",
-        // @ts-expect-error the openai type definition is wrong, the content field is required
         content: null,
         function_call: {
           name: "add_scenario",
@@ -105,7 +105,7 @@ export const autogenerateScenarioValues = async (
   }, {} as Record<string, { type: "string" }>);
   try {
-    const completion = await openai.createChatCompletion({
+    const completion = await openai.chat.completions.create({
       model: "gpt-3.5-turbo-0613",
       messages,
       functions: [
@@ -123,7 +123,7 @@ export const autogenerateScenarioValues = async (
     });
     const parsed = JSON.parse(
-      completion.data.choices[0]?.message?.function_call?.arguments ?? "{}"
+      completion.choices[0]?.message?.function_call?.arguments ?? "{}"
     ) as Record<string, string>;
     return parsed;
   } catch (e) {
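
The changes in this first file follow the openai v4 client: requests go through openai.chat.completions.create and the completion is returned directly, no longer wrapped in a .data property. A minimal before/after sketch of that call pattern, assuming a v4 client and an API key in process.env.OPENAI_API_KEY (the names here are illustrative, not taken from the commit):

import OpenAI from "openai";

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// v3: const completion = await openai.createChatCompletion({ ... });
//     const text = completion.data.choices[0]?.message?.content;
// v4: the completion is the return value itself, without the .data wrapper
const completion = await openai.chat.completions.create({
  model: "gpt-3.5-turbo-0613",
  messages: [{ role: "user", content: "Say hello" }],
});
const text = completion.choices[0]?.message?.content;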


@@ -1,8 +1,59 @@
-import { Configuration, OpenAIApi } from "openai";
+import { omit } from "lodash";
 import { env } from "~/env.mjs";
-const configuration = new Configuration({
-  apiKey: env.OPENAI_API_KEY,
-});
+import OpenAI from "openai";
+import { type ChatCompletion, type ChatCompletionChunk, type CompletionCreateParams } from "openai/resources/chat";
-export const openai = new OpenAIApi(configuration);
+
+// console.log("creating openai client");
+export const openai = new OpenAI({ apiKey: env.OPENAI_API_KEY });
+
+export const mergeStreamedChunks = (
+  base: ChatCompletion | null,
+  chunk: ChatCompletionChunk
+): ChatCompletion => {
+  if (base === null) {
+    return mergeStreamedChunks({ ...chunk, choices: [] }, chunk);
+  }
+  const choices = [...base.choices];
+  for (const choice of chunk.choices) {
+    const baseChoice = choices.find((c) => c.index === choice.index);
+    if (baseChoice) {
+      baseChoice.finish_reason = choice.finish_reason ?? baseChoice.finish_reason;
+      baseChoice.message = baseChoice.message ?? { role: "assistant" };
+      if (choice.delta?.content)
+        baseChoice.message.content =
+          (baseChoice.message.content as string ?? "") + (choice.delta.content ?? "");
+      if (choice.delta?.function_call) {
+        const fnCall = baseChoice.message.function_call ?? {};
+        fnCall.name = (fnCall.name as string ?? "") + (choice.delta.function_call.name as string ?? "");
+        fnCall.arguments = (fnCall.arguments as string ?? "") + (choice.delta.function_call.arguments as string ?? "");
+      }
+    } else {
+      choices.push({ ...omit(choice, "delta"), message: { role: "assistant", ...choice.delta } });
+    }
+  }
+  const merged: ChatCompletion = {
+    ...base,
+    choices,
+  };
+  return merged;
+};
+
+export const streamChatCompletion = async function* (body: CompletionCreateParams) {
+  // eslint-disable-next-line @typescript-eslint/no-unsafe-call
+  const resp = await openai.chat.completions.create({
+    ...body,
+    stream: true,
+  });
+  let mergedChunks: ChatCompletion | null = null;
+  for await (const part of resp) {
+    mergedChunks = mergeStreamedChunks(mergedChunks, part);
+    yield mergedChunks;
+  }
+};
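
A rough usage sketch for the new streaming helper, assuming the utils module above can be imported as shown (the import path and prompt are illustrative): streamChatCompletion yields the progressively merged ChatCompletion after each chunk, so the final yielded value is the complete response.

import { type ChatCompletion } from "openai/resources/chat";
import { streamChatCompletion } from "./openai"; // path is an assumption

let finalCompletion: ChatCompletion | null = null;
for await (const merged of streamChatCompletion({
  model: "gpt-3.5-turbo-0613",
  messages: [{ role: "user", content: "Stream a short greeting" }],
})) {
  // each value is every chunk received so far, merged into one ChatCompletion
  finalCompletion = merged;
}
console.log(finalCompletion?.choices[0]?.message?.content);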