Adds a `modelProvider` field to `promptVariants`; for now it is set to "openai/ChatCompletion" for all variants. Adds a `modelProviders/` directory where we can define and store pluggable model providers (currently only OpenAI is defined). Not everything is pluggable yet — notably, the code that actually generates completions has not been migrated to this setup. Does a lot of work to get the types working. Prompts are now defined with a function `definePrompt(modelProvider, config)` instead of `prompt = config`, and a script was added to migrate old prompt definitions. This is still partial work, but the diff is large enough that I want to get it in. I don't believe anything is broken, but I haven't tested thoroughly.
46 lines
901 B
TypeScript
import "dotenv/config";
|
|
import dedent from "dedent";
|
|
import { expect, test } from "vitest";
|
|
import { migrate1to2 } from "./migrateConstructFns";
|
|
|
|
test("migrate1to2", () => {
|
|
const constructFn = dedent`
|
|
// Test comment
|
|
|
|
prompt = {
|
|
model: "gpt-3.5-turbo-0613",
|
|
messages: [
|
|
{
|
|
role: "user",
|
|
content: "What is the capital of China?"
|
|
}
|
|
]
|
|
}
|
|
`;
|
|
|
|
const migrated = migrate1to2(constructFn);
|
|
expect(migrated).toBe(dedent`
|
|
// Test comment
|
|
|
|
definePrompt("openai/ChatCompletion", {
|
|
model: "gpt-3.5-turbo-0613",
|
|
messages: [
|
|
{
|
|
role: "user",
|
|
content: "What is the capital of China?"
|
|
}
|
|
]
|
|
})
|
|
`);
|
|
|
|
// console.log(
|
|
// migrateConstructFn(dedent`definePrompt(
|
|
// "openai/ChatCompletion",
|
|
// {
|
|
// model: 'gpt-3.5-turbo-0613',
|
|
// messages: []
|
|
// }
|
|
// )`),
|
|
// );
|
|
});
|