Prep for more model providers
Adds a `modelProvider` field to `promptVariants`; for now it's set to "openai/ChatCompletion" for every variant. Adds a `modelProviders/` directory where pluggable model providers can be defined and stored -- currently just OpenAI. Not everything is pluggable yet: notably, the code that actually generates completions hasn't been migrated to this setup. Does a lot of work to get the types working. Prompts are now defined with a function `definePrompt(modelProvider, config)` instead of `prompt = config`. Added a script to migrate old prompt definitions. This is still partial work, but the diff is large enough that I want to get it in. I don't think anything is broken, but I haven't tested thoroughly.
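For illustration, here is a minimal sketch of what the new `definePrompt(modelProvider, config)` entry point could look like, assuming the provider's generic parameters carry the input type; the actual implementation lives outside this diff and may differ:

```ts
// Hypothetical sketch -- not the repo's actual implementation.
// Ties a prompt's config to the input type accepted by the chosen model provider.
// Import path assumed from the directory layout introduced in this commit.
import { type ModelProvider } from "./modelProviders/types";

export function definePrompt<TModel extends string, TInput>(
  modelProvider: ModelProvider<TModel, TInput>,
  config: TInput,
) {
  return { modelProvider, config };
}

// Old style:  const prompt = config;
// New style:  const prompt = definePrompt(openaiChatCompletion, config);
```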
src/modelProviders/openai-ChatCompletion/index.ts (new file, 54 lines)
@@ -0,0 +1,54 @@
import { type JSONSchema4 } from "json-schema";
import { type ModelProvider } from "../types";
import inputSchema from "./codegen/input.schema.json";
import { type CompletionCreateParams } from "openai/resources/chat";

const supportedModels = [
  "gpt-4-0613",
  "gpt-4-32k-0613",
  "gpt-3.5-turbo-0613",
  "gpt-3.5-turbo-16k-0613",
] as const;

type SupportedModel = (typeof supportedModels)[number];

const modelProvider: ModelProvider<SupportedModel, CompletionCreateParams> = {
  name: "OpenAI ChatCompletion",
  models: {
    "gpt-4-0613": {
      name: "GPT-4",
      learnMore: "https://openai.com/gpt-4",
    },
    "gpt-4-32k-0613": {
      name: "GPT-4 32k",
      learnMore: "https://openai.com/gpt-4",
    },
    "gpt-3.5-turbo-0613": {
      name: "GPT-3.5 Turbo",
      learnMore: "https://platform.openai.com/docs/guides/gpt/chat-completions-api",
    },
    "gpt-3.5-turbo-16k-0613": {
      name: "GPT-3.5 Turbo 16k",
      learnMore: "https://platform.openai.com/docs/guides/gpt/chat-completions-api",
    },
  },
  getModel: (input) => {
    if (supportedModels.includes(input.model as SupportedModel))
      return input.model as SupportedModel;

    const modelMaps: Record<string, SupportedModel> = {
      "gpt-4": "gpt-4-0613",
      "gpt-4-32k": "gpt-4-32k-0613",
      "gpt-3.5-turbo": "gpt-3.5-turbo-0613",
      "gpt-3.5-turbo-16k": "gpt-3.5-turbo-16k-0613",
    };

    if (input.model in modelMaps) return modelMaps[input.model] as SupportedModel;

    return null;
  },
  inputSchema: inputSchema as JSONSchema4,
  shouldStream: (input) => input.stream ?? false,
};

export default modelProvider;
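The `ModelProvider` type imported from `../types` isn't included in this diff. Based purely on how it's used above, it plausibly looks something like this sketch (field shapes inferred from usage; not the authoritative definition):

```ts
import { type JSONSchema4 } from "json-schema";

// Inferred sketch of the pluggable provider interface used above.
export type ModelProvider<TSupportedModel extends string, TInput> = {
  name: string;
  // Display metadata for each supported model identifier.
  models: Record<TSupportedModel, { name: string; learnMore: string }>;
  // Resolves an arbitrary input to a supported model identifier, or null if unsupported.
  getModel: (input: TInput) => TSupportedModel | null;
  // JSON schema for the provider's input, generated under codegen/.
  inputSchema: JSONSchema4;
  // Whether a given input should use streaming completions.
  shouldStream: (input: TInput) => boolean;
};
```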