Use javascript functions for prompt completions instead of templated json

Kyle Corbitt
2023-07-13 18:01:07 -07:00
parent 1776da937a
commit 4770ea34a8
33 changed files with 1654 additions and 215 deletions


@@ -0,0 +1,15 @@
-- 1. Add a nullable constructFn column
ALTER TABLE "PromptVariant"
ADD COLUMN "constructFn" TEXT;
-- 2. Populate constructFn based on the config column
UPDATE "PromptVariant"
SET "constructFn" = 'prompt = ' || "config"::text;
-- 3. Remove the config column
ALTER TABLE "PromptVariant"
DROP COLUMN "config";
-- 4. Make constructFn not null
ALTER TABLE "PromptVariant"
ALTER COLUMN "constructFn" SET NOT NULL;
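
Step 2 above concatenates the literal prefix 'prompt = ' with the JSON text of the old config column, so each existing row ends up holding a small JavaScript assignment rather than raw JSON. A minimal sketch of the equivalent transformation, with illustrative values rather than data from this repo:

// Illustrative only: what migration step 2 produces for one row.
const config = { model: "gpt-3.5-turbo-0613", temperature: 0 }; // old "config" JSON
const constructFn = "prompt = " + JSON.stringify(config);       // new "constructFn" text
console.log(constructFn);
// prompt = {"model":"gpt-3.5-turbo-0613","temperature":0}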


@@ -29,7 +29,7 @@ model PromptVariant {
id String @id @default(uuid()) @db.Uuid
label String
- config Json
+ constructFn String
uiId String @default(uuid()) @db.Uuid
visible Boolean @default(true)
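
With the Json config replaced by a plain String, each variant now stores a snippet of JavaScript that assigns the full request body to prompt. The runtime that executes these snippets is not part of this hunk; the helper below is a hedged sketch of one way such a string could be evaluated (evaluateConstructFn is a hypothetical name, not an API from this commit):

// Hedged sketch: run a stored constructFn string and return the `prompt`
// object it assigns. Hypothetical helper, not code from this diff.
function evaluateConstructFn(constructFn: string): unknown {
  const run = new Function(`let prompt; ${constructFn}; return prompt;`);
  return run();
}

const prompt = evaluateConstructFn(
  'prompt = { model: "gpt-3.5-turbo-0613", temperature: 0 }'
);
// -> { model: "gpt-3.5-turbo-0613", temperature: 0 }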


@@ -36,17 +36,17 @@ await prisma.promptVariant.createMany({
experimentId,
label: "Prompt Variant 1",
sortIndex: 0,
- config: {
+ constructFn: `prompt = {
model: "gpt-3.5-turbo-0613",
messages: [{ role: "user", content: "What is the capital of {{country}}?" }],
temperature: 0,
- },
+ }`,
},
{
experimentId,
label: "Prompt Variant 2",
sortIndex: 1,
- config: {
+ constructFn: `prompt = {
model: "gpt-3.5-turbo-0613",
messages: [
{
@@ -56,7 +56,7 @@ await prisma.promptVariant.createMany({
},
],
temperature: 0,
- },
+ }`,
},
],
});
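
The seeded snippets keep the {{country}}-style placeholders from the old templated JSON, which presumably get substituted with each scenario's values before the snippet is evaluated. An assumed sketch of that substitution (fillTemplate is a hypothetical helper, not code from this commit):

// Assumed sketch: replace {{variable}} placeholders with scenario values
// before the constructFn snippet is evaluated.
function fillTemplate(constructFn: string, vars: Record<string, string>): string {
  return constructFn.replace(/\{\{(\w+)\}\}/g, (_m, name: string) => vars[name] ?? "");
}

fillTemplate(
  'prompt = { messages: [{ role: "user", content: "What is the capital of {{country}}?" }] }',
  { country: "France" }
);
// -> 'prompt = { messages: [{ role: "user", content: "What is the capital of France?" }] }'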


@@ -12,7 +12,7 @@ await prisma.promptVariant.createMany({
{
experimentId: functionCallsExperiment.id,
label: "No Fn Calls",
- config: {
+ constructFn: `prompt = {
model: "gpt-3.5-turbo-0613",
messages: [
{
@@ -25,12 +25,12 @@ await prisma.promptVariant.createMany({
content: "Text:\n---\n{{text}}",
},
],
- },
+ }`,
},
{
experimentId: functionCallsExperiment.id,
label: "Fn Calls",
- config: {
+ constructFn: `prompt = {
model: "gpt-3.5-turbo-0613",
messages: [
{
@@ -60,7 +60,7 @@ await prisma.promptVariant.createMany({
function_call: {
name: "analyze_sentiment",
},
- },
+ }`,
},
],
});
@@ -92,7 +92,7 @@ await prisma.promptVariant.createMany({
experimentId: redditExperiment.id,
label: "3.5 Base",
sortIndex: 0,
- config: {
+ constructFn: `prompt = {
model: "gpt-3.5-turbo-0613",
messages: [
{
@@ -101,13 +101,13 @@ await prisma.promptVariant.createMany({
'Reddit post:\n\n title: {{title}}\n body: {{body}}\n \n How likely is it that the poster has the following need? Answer with just "high", "medium" or "low" in quotes.\n \n Need: {{need}}.',
},
],
- },
+ }`,
},
{
experimentId: redditExperiment.id,
label: "4 Base",
sortIndex: 1,
- config: {
+ constructFn: `prompt = {
model: "gpt-4-0613",
messages: [
{
@@ -116,13 +116,13 @@ await prisma.promptVariant.createMany({
'Reddit post:\n\n title: {{title}}\n body: {{body}}\n \n How likely is it that the poster has the following need? Answer with just "high", "medium" or "low" in quotes.\n \n Need: {{need}}.',
},
],
- },
+ }`,
},
{
experimentId: redditExperiment.id,
label: "3.5 CoT + Functions",
sortIndex: 2,
- config: {
+ constructFn: `prompt = {
model: "gpt-3.5-turbo-0613",
messages: [
{
@@ -161,7 +161,7 @@ await prisma.promptVariant.createMany({
function_call: {
name: "extract_relevance",
},
- },
+ }`,
},
],
});
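
Putting the pieces together, a seeded variant can be read back with Prisma, its placeholders filled, and the snippet evaluated to recover the chat-completion payload, including the functions / function_call fields seeded above. This end-to-end sketch reuses the hypothetical fillTemplate and evaluateConstructFn helpers from the earlier sketches and is an assumption about usage, not code from this diff:

// Hedged end-to-end sketch (reuses the hypothetical helpers sketched above).
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

const variant = await prisma.promptVariant.findFirstOrThrow({
  where: { label: "3.5 CoT + Functions" },
});

// Sample scenario values for the {{title}}, {{body}}, and {{need}} placeholders
// seen in the Reddit experiment prompts above.
const filled = fillTemplate(variant.constructFn, {
  title: "Looking for a standing desk",
  body: "My back hurts after long coding sessions.",
  need: "ergonomic office furniture",
});
console.log(evaluateConstructFn(filled));
// -> { model: "gpt-3.5-turbo-0613", messages: [...], functions: [...], function_call: { name: "extract_relevance" } }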