Use JavaScript functions for prompt completions instead of templated JSON
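In short: each PromptVariant's `config Json` column becomes a `constructFn String` column holding JavaScript source that assigns the OpenAI request to `prompt`. The server evaluates that source inside an isolated VM (`src/server/utils/constructPrompt.ts` below), and the in-app Monaco editor switches from JSON with an attached OpenAPI schema to TypeScript with generated type definitions. Illustrated with a variant from the seed data in this diff:

// Before: the request template lived in a JSON column.
config: {
  model: "gpt-3.5-turbo-0613",
  messages: [{ role: "user", content: "What is the capital of {{country}}?" }],
  temperature: 0,
},

// After: the same request, as JavaScript that assigns to `prompt`.
constructFn: `prompt = {
  model: "gpt-3.5-turbo-0613",
  messages: [{ role: "user", content: "What is the capital of {{country}}?" }],
  temperature: 0,
}`,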
@@ -20,6 +20,14 @@ const config = {
     locales: ["en"],
     defaultLocale: "en",
   },
+
+  webpack: (config) => {
+    config.module.rules.push({
+      test: /\.txt$/,
+      use: "raw-loader",
+    });
+    return config;
+  },
 };

 export default nextRoutes()(config);
10  package.json
@@ -11,7 +11,7 @@
     "postinstall": "prisma generate",
     "lint": "next lint",
     "start": "next start",
-    "codegen": "tsx src/codegen/export-openai-schema.ts"
+    "codegen": "tsx src/codegen/export-openai-types.ts"
   },
   "dependencies": {
     "@chakra-ui/next-js": "^2.1.4",
@@ -19,7 +19,7 @@
     "@emotion/react": "^11.11.1",
     "@emotion/server": "^11.11.0",
     "@emotion/styled": "^11.11.0",
-    "@monaco-editor/react": "^4.5.1",
+    "@monaco-editor/loader": "^1.3.3",
     "@next-auth/prisma-adapter": "^1.0.5",
     "@prisma/client": "^4.14.0",
     "@t3-oss/env-nextjs": "^0.3.1",
@@ -33,10 +33,13 @@
     "concurrently": "^8.2.0",
     "cors": "^2.8.5",
     "dayjs": "^1.11.8",
+    "dedent": "^1.0.1",
     "dotenv": "^16.3.1",
     "express": "^4.18.2",
     "framer-motion": "^10.12.17",
     "gpt-tokens": "^1.0.10",
     "immer": "^10.0.2",
+    "isolated-vm": "^4.5.0",
+    "json-stringify-pretty-compact": "^4.0.0",
     "lodash": "^4.17.21",
     "next": "^13.4.2",
@@ -74,9 +77,12 @@
     "eslint": "^8.40.0",
     "eslint-config-next": "^13.4.2",
     "eslint-plugin-unused-imports": "^2.0.0",
+    "openapi-typescript": "^6.3.4",
     "prettier": "^3.0.0",
     "prisma": "^4.14.0",
+    "raw-loader": "^4.0.2",
     "typescript": "^5.0.4",
     "vitest": "^0.33.0",
+    "yaml": "^2.3.1"
   },
   "ct3aMetadata": {
1096  pnpm-lock.yaml  generated
File diff suppressed because it is too large
@@ -0,0 +1,15 @@
+-- 1. Add a nullable constructFn column
+ALTER TABLE "PromptVariant"
+ADD COLUMN "constructFn" TEXT;
+
+-- 2. Populate constructFn based on the config column
+UPDATE "PromptVariant"
+SET "constructFn" = 'prompt = ' || "config"::text;
+
+-- 3. Remove the config column
+ALTER TABLE "PromptVariant"
+DROP COLUMN "config";
+
+-- 4. Make constructFn not null
+ALTER TABLE "PromptVariant"
+ALTER COLUMN "constructFn" SET NOT NULL;
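Because a JSON document is also a valid JavaScript object literal, step 2 can reuse each row's old config verbatim as the body of the new constructor function. The same transform sketched in TypeScript, with a made-up row:

// Hypothetical value of the old "config" column for one row:
const config = { model: "gpt-3.5-turbo-0613", temperature: 0 };

// Equivalent of SET "constructFn" = 'prompt = ' || "config"::text
const constructFn = "prompt = " + JSON.stringify(config);
// => prompt = {"model":"gpt-3.5-turbo-0613","temperature":0}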
@@ -29,7 +29,7 @@ model PromptVariant {
   id    String @id @default(uuid()) @db.Uuid
   label String

-  config Json
+  constructFn String

   uiId    String  @default(uuid()) @db.Uuid
   visible Boolean @default(true)
@@ -36,17 +36,17 @@ await prisma.promptVariant.createMany({
       experimentId,
       label: "Prompt Variant 1",
       sortIndex: 0,
-      config: {
+      constructFn: `prompt = {
         model: "gpt-3.5-turbo-0613",
         messages: [{ role: "user", content: "What is the capital of {{country}}?" }],
         temperature: 0,
-      },
+      }`,
     },
     {
       experimentId,
       label: "Prompt Variant 2",
       sortIndex: 1,
-      config: {
+      constructFn: `prompt = {
         model: "gpt-3.5-turbo-0613",
         messages: [
           {
@@ -56,7 +56,7 @@ await prisma.promptVariant.createMany({
           },
         ],
         temperature: 0,
-      },
+      }`,
     },
   ],
 });
@@ -12,7 +12,7 @@ await prisma.promptVariant.createMany({
     {
       experimentId: functionCallsExperiment.id,
       label: "No Fn Calls",
-      config: {
+      constructFn: `prompt = {
         model: "gpt-3.5-turbo-0613",
         messages: [
           {
@@ -25,12 +25,12 @@ await prisma.promptVariant.createMany({
             content: "Text:\n---\n{{text}}",
           },
         ],
-      },
+      }`,
     },
     {
       experimentId: functionCallsExperiment.id,
       label: "Fn Calls",
-      config: {
+      constructFn: `prompt = {
         model: "gpt-3.5-turbo-0613",
         messages: [
           {
@@ -60,7 +60,7 @@ await prisma.promptVariant.createMany({
         function_call: {
           name: "analyze_sentiment",
         },
-      },
+      }`,
     },
   ],
 });
@@ -92,7 +92,7 @@ await prisma.promptVariant.createMany({
       experimentId: redditExperiment.id,
       label: "3.5 Base",
       sortIndex: 0,
-      config: {
+      constructFn: `prompt = {
         model: "gpt-3.5-turbo-0613",
         messages: [
           {
@@ -101,13 +101,13 @@ await prisma.promptVariant.createMany({
               'Reddit post:\n\n title: {{title}}\n body: {{body}}\n \n How likely is it that the poster has the following need? Answer with just "high", "medium" or "low" in quotes.\n \n Need: {{need}}.',
           },
         ],
-      },
+      }`,
     },
     {
       experimentId: redditExperiment.id,
       label: "4 Base",
       sortIndex: 1,
-      config: {
+      constructFn: `prompt = {
         model: "gpt-4-0613",
         messages: [
           {
@@ -116,13 +116,13 @@ await prisma.promptVariant.createMany({
               'Reddit post:\n\n title: {{title}}\n body: {{body}}\n \n How likely is it that the poster has the following need? Answer with just "high", "medium" or "low" in quotes.\n \n Need: {{need}}.',
           },
         ],
-      },
+      }`,
     },
     {
       experimentId: redditExperiment.id,
       label: "3.5 CoT + Functions",
       sortIndex: 2,
-      config: {
+      constructFn: `prompt = {
         model: "gpt-3.5-turbo-0613",
         messages: [
           {
@@ -161,7 +161,7 @@ await prisma.promptVariant.createMany({
         function_call: {
           name: "extract_relevance",
         },
-      },
+      }`,
     },
   ],
 });
Binary file not shown. (Before: 15 KiB; After: 15 KiB)
@@ -1,19 +1,19 @@
-{
-  "name": "",
-  "short_name": "",
-  "icons": [
-    {
-      "src": "/android-chrome-192x192.png",
-      "sizes": "192x192",
-      "type": "image/png"
-    },
-    {
-      "src": "/android-chrome-512x512.png",
-      "sizes": "512x512",
-      "type": "image/png"
-    }
-  ],
-  "theme_color": "#ffffff",
-  "background_color": "#ffffff",
-  "display": "standalone"
+{
+  "name": "",
+  "short_name": "",
+  "icons": [
+    {
+      "src": "/android-chrome-192x192.png",
+      "sizes": "192x192",
+      "type": "image/png"
+    },
+    {
+      "src": "/android-chrome-512x512.png",
+      "sizes": "512x512",
+      "type": "image/png"
+    }
+  ],
+  "theme_color": "#ffffff",
+  "background_color": "#ffffff",
+  "display": "standalone"
 }
52  src/codegen/export-openai-types.ts  Normal file
@@ -0,0 +1,52 @@
+import fs from "fs";
+import path from "path";
+import openapiTS, { type OpenAPI3 } from "openapi-typescript";
+import YAML from "yaml";
+import _ from "lodash";
+import assert from "assert";
+
+const OPENAPI_URL =
+  "https://raw.githubusercontent.com/openai/openai-openapi/0c432eb66fd0c758fd8b9bd69db41c1096e5f4db/openapi.yaml";
+
+// Generate TypeScript types from OpenAPI
+
+const schema = await fetch(OPENAPI_URL)
+  .then((res) => res.text())
+  .then((txt) => YAML.parse(txt) as OpenAPI3);
+
+console.log(schema.components?.schemas?.CreateChatCompletionRequest);
+
+// @ts-expect-error just assume this works, the assert will catch it if it doesn't
+const modelProperty = schema.components?.schemas?.CreateChatCompletionRequest?.properties?.model;
+
+assert(modelProperty.oneOf.length === 2, "Expected model to have oneOf length of 2");
+
+// We need to do a bit of surgery here since the Monaco editor doesn't like
+// the fact that the schema says `model` can be either a string or an enum,
+// and displays a warning in the editor. Let's stick with just an enum for
+// now and drop the string option.
+modelProperty.type = "string";
+modelProperty.enum = modelProperty.oneOf[1].enum;
+modelProperty.oneOf = undefined;
+
+delete schema["paths"];
+assert(schema.components?.schemas);
+schema.components.schemas = _.pick(schema.components?.schemas, [
+  "CreateChatCompletionRequest",
+  "ChatCompletionRequestMessage",
+  "ChatCompletionFunctions",
+  "ChatCompletionFunctionParameters",
+]);
+console.log(schema);
+
+let openApiTypes = await openapiTS(schema);
+
+// Remove the `export` from any line that starts with `export`
+openApiTypes = openApiTypes.replaceAll("\nexport ", "\n");
+
+// Get the directory of the current script
+const currentDirectory = path.dirname(import.meta.url).replace("file://", "");
+
+// Write the TypeScript types. We only want to use this in our in-app editor, so
+// save as a .txt so VS Code doesn't try to auto-import definitions from it.
+fs.writeFileSync(path.join(currentDirectory, "openai.types.ts.txt"), openApiTypes);
148  src/codegen/openai.types.ts.txt  Normal file
@@ -0,0 +1,148 @@
+/**
+ * This file was auto-generated by openapi-typescript.
+ * Do not make direct changes to the file.
+ */
+
+
+/** OneOf type helpers */
+type Without<T, U> = { [P in Exclude<keyof T, keyof U>]?: never };
+type XOR<T, U> = (T | U) extends object ? (Without<T, U> & U) | (Without<U, T> & T) : T | U;
+type OneOf<T extends any[]> = T extends [infer Only] ? Only : T extends [infer A, infer B, ...infer Rest] ? OneOf<[XOR<A, B>, ...Rest]> : never;
+
+type paths = Record<string, never>;
+
+type webhooks = Record<string, never>;
+
+interface components {
+  schemas: {
+    CreateChatCompletionRequest: {
+      /**
+       * @description ID of the model to use. See the [model endpoint compatibility](/docs/models/model-endpoint-compatibility) table for details on which models work with the Chat API.
+       * @example gpt-3.5-turbo
+       * @enum {string}
+       */
+      model: "gpt-4" | "gpt-4-0613" | "gpt-4-32k" | "gpt-4-32k-0613" | "gpt-3.5-turbo" | "gpt-3.5-turbo-16k" | "gpt-3.5-turbo-0613" | "gpt-3.5-turbo-16k-0613";
+      /** @description A list of messages comprising the conversation so far. [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb). */
+      messages: (components["schemas"]["ChatCompletionRequestMessage"])[];
+      /** @description A list of functions the model may generate JSON inputs for. */
+      functions?: (components["schemas"]["ChatCompletionFunctions"])[];
+      /** @description Controls how the model responds to function calls. "none" means the model does not call a function, and responds to the end-user. "auto" means the model can pick between an end-user or calling a function. Specifying a particular function via `{"name":\ "my_function"}` forces the model to call that function. "none" is the default when no functions are present. "auto" is the default if functions are present. */
+      function_call?: OneOf<["none" | "auto", {
+        /** @description The name of the function to call. */
+        name: string;
+      }]>;
+      /**
+       * @description What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.
+       *
+       * We generally recommend altering this or `top_p` but not both.
+       *
+       * @default 1
+       * @example 1
+       */
+      temperature?: number | null;
+      /**
+       * @description An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
+       *
+       * We generally recommend altering this or `temperature` but not both.
+       *
+       * @default 1
+       * @example 1
+       */
+      top_p?: number | null;
+      /**
+       * @description How many chat completion choices to generate for each input message.
+       * @default 1
+       * @example 1
+       */
+      n?: number | null;
+      /**
+       * @description If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) as they become available, with the stream terminated by a `data: [DONE]` message. [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_stream_completions.ipynb).
+       *
+       * @default false
+       */
+      stream?: boolean | null;
+      /**
+       * @description Up to 4 sequences where the API will stop generating further tokens.
+       *
+       * @default null
+       */
+      stop?: (string | null) | (string)[];
+      /**
+       * @description The maximum number of [tokens](/tokenizer) to generate in the chat completion.
+       *
+       * The total length of input tokens and generated tokens is limited by the model's context length. [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.
+       *
+       * @default inf
+       */
+      max_tokens?: number;
+      /**
+       * @description Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.
+       *
+       * [See more information about frequency and presence penalties.](/docs/api-reference/parameter-details)
+       *
+       * @default 0
+       */
+      presence_penalty?: number | null;
+      /**
+       * @description Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
+       *
+       * [See more information about frequency and presence penalties.](/docs/api-reference/parameter-details)
+       *
+       * @default 0
+       */
+      frequency_penalty?: number | null;
+      /**
+       * @description Modify the likelihood of specified tokens appearing in the completion.
+       *
+       * Accepts a json object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token.
+       *
+       * @default null
+       */
+      logit_bias?: Record<string, unknown> | null;
+      /**
+       * @description A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. [Learn more](/docs/guides/safety-best-practices/end-user-ids).
+       *
+       * @example user-1234
+       */
+      user?: string;
+    };
+    ChatCompletionRequestMessage: {
+      /**
+       * @description The role of the messages author. One of `system`, `user`, `assistant`, or `function`.
+       * @enum {string}
+       */
+      role: "system" | "user" | "assistant" | "function";
+      /** @description The contents of the message. `content` is required for all messages except assistant messages with function calls. */
+      content?: string;
+      /** @description The name of the author of this message. `name` is required if role is `function`, and it should be the name of the function whose response is in the `content`. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters. */
+      name?: string;
+      /** @description The name and arguments of a function that should be called, as generated by the model. */
+      function_call?: {
+        /** @description The name of the function to call. */
+        name?: string;
+        /** @description The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function. */
+        arguments?: string;
+      };
+    };
+    ChatCompletionFunctions: {
+      /** @description The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64. */
+      name: string;
+      /** @description The description of what the function does. */
+      description?: string;
+      parameters?: components["schemas"]["ChatCompletionFunctionParameters"];
+    };
+    /** @description The parameters the functions accepts, described as a JSON Schema object. See the [guide](/docs/guides/gpt/function-calling) for examples, and the [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for documentation about the format. */
+    ChatCompletionFunctionParameters: {
+      [key: string]: unknown;
+    };
+  };
+  responses: never;
+  parameters: never;
+  requestBodies: never;
+  headers: never;
+  pathItems: never;
+}
+
+type external = Record<string, never>;
+
+type operations = Record<string, never>;
6  src/codegen/tsconfig.json  Normal file
@@ -0,0 +1,6 @@
+{
+  "compilerOptions": {
+    "target": "esnext",
+    "moduleResolution": "nodenext"
+  }
+}
@@ -10,8 +10,6 @@ import { type ChatCompletion } from "openai/resources/chat";
 import { generateChannel } from "~/utils/generateChannel";
 import { isObject } from "lodash";
 import useSocket from "~/utils/useSocket";
-import { type JSONSerializable } from "~/server/types";
-import { getModelName } from "~/server/utils/getModelName";
 import { OutputStats } from "./OutputStats";
 import { ErrorHandler } from "./ErrorHandler";

@@ -36,10 +34,12 @@ export default function OutputCell({

   if (!templateHasVariables) disabledReason = "Add a value to the scenario variables to see output";

-  if (variant.config === null || Object.keys(variant.config).length === 0)
-    disabledReason = "Save your prompt variant to see output";
+  // if (variant.config === null || Object.keys(variant.config).length === 0)
+  //   disabledReason = "Save your prompt variant to see output";

-  const model = getModelName(variant.config as JSONSerializable);
+  // const model = getModelName(variant.config as JSONSerializable);
+  // TODO: Temporarily hardcoding this while we get other stuff working
+  const model = "gpt-3.5-turbo";

   const outputMutation = api.outputs.get.useMutation();

@@ -107,7 +107,7 @@ export default function ScenarioEditor({
           cursor: "pointer",
         }}
       >
-        <Icon as={hidingInProgress ? Spinner :BsX} boxSize={6} />
+        <Icon as={hidingInProgress ? Spinner : BsX} boxSize={6} />
       </Button>
     </Tooltip>
     <Icon
@@ -8,13 +8,13 @@ import {
   Heading,
   Stack,
 } from "@chakra-ui/react";
-import { useStore } from "~/utils/store";
 import EditScenarioVars from "./EditScenarioVars";
 import EditEvaluations from "./EditEvaluations";
+import { useAppStore } from "~/state/store";

 export default function SettingsDrawer() {
-  const isOpen = useStore((state) => state.drawerOpen);
-  const closeDrawer = useStore((state) => state.closeDrawer);
+  const isOpen = useAppStore((state) => state.drawerOpen);
+  const closeDrawer = useAppStore((state) => state.closeDrawer);

   return (
     <Drawer isOpen={isOpen} placement="right" onClose={closeDrawer} size="md">
@@ -1,66 +1,67 @@
 import { Box, Button, HStack, Tooltip, useToast } from "@chakra-ui/react";
-import { useMonaco } from "@monaco-editor/react";
-import { useRef, useEffect, useState, useCallback, useMemo } from "react";
+import { useRef, useEffect, useState, useCallback } from "react";
 import { useHandledAsyncCallback, useModifierKeyLabel } from "~/utils/hooks";
 import { type PromptVariant } from "./types";
-import { type JSONSerializable } from "~/server/types";
 import { api } from "~/utils/api";
-import openaiSchema from "~/codegen/openai.schema.json";
-
-let isEditorConfigured = false;
+import { useAppStore } from "~/state/store";
+// import openAITypes from "~/codegen/openai.types.ts.txt";

 export default function VariantConfigEditor(props: { variant: PromptVariant }) {
-  const monaco = useMonaco();
+  const monaco = useAppStore.use.variantEditor.monaco();
   const editorRef = useRef<ReturnType<NonNullable<typeof monaco>["editor"]["create"]> | null>(null);
   const [editorId] = useState(() => `editor_${Math.random().toString(36).substring(7)}`);
   const [isChanged, setIsChanged] = useState(false);

-  const savedConfig = useMemo(
-    () => JSON.stringify(props.variant.config, null, 2),
-    [props.variant.config],
-  );
-  const savedConfigRef = useRef(savedConfig);
+  const lastSavedFn = props.variant.constructFn;

   const modifierKey = useModifierKeyLabel();

   const checkForChanges = useCallback(() => {
     if (!editorRef.current) return;
     const currentConfig = editorRef.current.getValue();
-    setIsChanged(currentConfig !== savedConfigRef.current);
-  }, []);
+    setIsChanged(currentConfig !== lastSavedFn);
+  }, [lastSavedFn]);

-  const replaceWithConfig = api.promptVariants.replaceWithConfig.useMutation();
+  const replaceVariant = api.promptVariants.replaceVariant.useMutation();
   const utils = api.useContext();
   const toast = useToast();

   const [onSave] = useHandledAsyncCallback(async () => {
-    const currentConfig = editorRef.current?.getValue();
-    if (!currentConfig) return;
+    const currentFn = editorRef.current?.getValue();
+    if (!currentFn) return;

-    let parsedConfig: JSONSerializable;
-    try {
-      parsedConfig = JSON.parse(currentConfig) as JSONSerializable;
-    } catch (e) {
+    // Check if the editor has any typescript errors
+    const model = editorRef.current?.getModel();
+    if (!model) return;
+
+    const markers = monaco?.editor.getModelMarkers({ resource: model.uri });
+    const hasErrors = markers?.some((m) => m.severity === monaco?.MarkerSeverity.Error);
+
+    if (hasErrors) {
       toast({
-        title: "Invalid JSON",
-        description: "Please fix the JSON before saving.",
+        title: "Invalid TypeScript",
+        description: "Please fix the TypeScript errors before saving.",
         status: "error",
       });
       return;
     }

-    if (parsedConfig === null) {
+    // Make sure the user defined the prompt with the string "prompt\w*=" somewhere
+    const promptRegex = /prompt\s*=/;
+    if (!promptRegex.test(currentFn)) {
+      console.log("no prompt");
+      console.log(currentFn);
       toast({
-        title: "Invalid JSON",
-        description: "Please fix the JSON before saving.",
+        title: "Missing prompt",
+        description: "Please define the prompt (eg. `prompt = { ...`).",
         status: "error",
       });
       return;
     }

-    await replaceWithConfig.mutateAsync({
+    await replaceVariant.mutateAsync({
       id: props.variant.id,
-      config: currentConfig,
+      constructFn: currentFn,
     });

     await utils.promptVariants.list.invalidate();
@@ -70,37 +71,11 @@ export default function VariantConfigEditor(props: { variant: PromptVariant }) {

   useEffect(() => {
     if (monaco) {
-      if (!isEditorConfigured) {
-        monaco.editor.defineTheme("customTheme", {
-          base: "vs",
-          inherit: true,
-          rules: [],
-          colors: {
-            "editor.background": "#fafafa",
-          },
-        });
-        monaco.languages.json.jsonDefaults.setDiagnosticsOptions({
-          validate: true,
-          schemas: [
-            {
-              uri: "https://api.openai.com/v1",
-              fileMatch: ["*"],
-              schema: {
-                $schema: "http://json-schema.org/draft-07/schema#",
-                $ref: "#/components/schemas/CreateChatCompletionRequest",
-                components: openaiSchema.components,
-              },
-            },
-          ],
-        });
-        isEditorConfigured = true;
-      }
-
       const container = document.getElementById(editorId) as HTMLElement;

       editorRef.current = monaco.editor.create(container, {
-        value: savedConfig,
-        language: "json",
+        value: lastSavedFn,
+        language: "typescript",
         theme: "customTheme",
         lineNumbers: "off",
         minimap: { enabled: false },
@@ -114,6 +89,7 @@ export default function VariantConfigEditor(props: { variant: PromptVariant }) {
         },
         wordWrapBreakAfterCharacters: "",
         wordWrapBreakBeforeCharacters: "",
+        quickSuggestions: true,
       });

       editorRef.current.onDidFocusEditorText(() => {
@@ -141,17 +117,17 @@ export default function VariantConfigEditor(props: { variant: PromptVariant }) {
     /* eslint-disable-next-line react-hooks/exhaustive-deps */
   }, [monaco, editorId]);

-  useEffect(() => {
-    const savedConfigChanged = savedConfigRef.current !== savedConfig;
+  // useEffect(() => {
+  //   const savedConfigChanged = lastSavedFn !== savedConfig;

-    savedConfigRef.current = savedConfig;
+  //   lastSavedFn = savedConfig;

-    if (savedConfigChanged && editorRef.current?.getValue() !== savedConfig) {
-      editorRef.current?.setValue(savedConfig);
-    }
+  //   if (savedConfigChanged && editorRef.current?.getValue() !== savedConfig) {
+  //     editorRef.current?.setValue(savedConfig);
+  //   }

-    checkForChanges();
-  }, [savedConfig, checkForChanges]);
+  //   checkForChanges();
+  // }, [savedConfig, checkForChanges]);

   return (
     <Box w="100%" pos="relative">
@@ -162,7 +138,7 @@ export default function VariantConfigEditor(props: { variant: PromptVariant }) {
           colorScheme="gray"
           size="sm"
           onClick={() => {
-            editorRef.current?.setValue(savedConfig);
+            editorRef.current?.setValue(lastSavedFn);
             checkForChanges();
           }}
         >
@@ -3,12 +3,12 @@ import { api } from "~/utils/api";
 import NewScenarioButton from "./NewScenarioButton";
 import NewVariantButton from "./NewVariantButton";
 import ScenarioRow from "./ScenarioRow";
-import VariantConfigEditor from "./VariantConfigEditor";
+import VariantConfigEditor from "./VariantEditor";
 import VariantHeader from "./VariantHeader";
 import { cellPadding } from "../constants";
 import { BsPencil } from "react-icons/bs";
-import { useStore } from "~/utils/store";
 import VariantStats from "./VariantStats";
+import { useAppStore } from "~/state/store";

 const stickyHeaderStyle: SystemStyleObject = {
   position: "sticky",
@@ -22,7 +22,7 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
     { experimentId: experimentId as string },
     { enabled: !!experimentId },
   );
-  const openDrawer = useStore((s) => s.openDrawer);
+  const openDrawer = useAppStore((s) => s.openDrawer);

   const scenarios = api.scenarios.list.useQuery(
     { experimentId: experimentId as string },
@@ -57,7 +57,7 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
         py={cellPadding.y}
         // TODO: This is a hack to get the sticky header to work. It's not ideal because it's not responsive to the height of the header,
         // so if the header height changes, this will need to be updated.
-        sx={{...stickyHeaderStyle, top: "-337px"}}
+        sx={{ ...stickyHeaderStyle, top: "-337px" }}
       >
         <HStack w="100%">
           <Heading size="xs" fontWeight="bold" flex={1}>
@@ -29,7 +29,8 @@ import SettingsDrawer from "~/components/OutputsTable/SettingsDrawer";
 import AppShell from "~/components/nav/AppShell";
 import { api } from "~/utils/api";
 import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
-import { useStore } from "~/utils/store";
+import { useAppStore } from "~/state/store";
+import { useSyncVariantEditor } from "~/state/sync";

 const DeleteButton = () => {
   const experiment = useExperiment();
@@ -94,7 +95,8 @@ export default function Experiment() {
   const router = useRouter();
   const experiment = useExperiment();
   const utils = api.useContext();
-  const openDrawer = useStore((s) => s.openDrawer);
+  const openDrawer = useAppStore((s) => s.openDrawer);
+  useSyncVariantEditor();

   const [label, setLabel] = useState(experiment.data?.label || "");
   useEffect(() => {
@@ -70,13 +70,10 @@ export const autogenerateScenarioValues = async (
     },
   ];

-  const promptText = JSON.stringify(prompt.config);
-  if (variables.some((variable) => promptHasVariable(promptText, variable.label))) {
-    messages.push({
-      role: "user",
-      content: `Prompt template:\n---\n${promptText}`,
-    });
-  }
+  messages.push({
+    role: "user",
+    content: `Prompt constructor function:\n---\n${prompt.constructFn}`,
+  });

   existingScenarios
     .map(
@@ -1,6 +1,7 @@
 import { z } from "zod";
 import { createTRPCRouter, publicProcedure } from "~/server/api/trpc";
 import { prisma } from "~/server/db";
+import dedent from "dedent";

 export const experimentsRouter = createTRPCRouter({
   list: publicProcedure.query(async () => {
@@ -69,16 +70,11 @@ export const experimentsRouter = createTRPCRouter({
         experimentId: exp.id,
         label: "Prompt Variant 1",
         sortIndex: 0,
-        config: {
+        constructFn: dedent`prompt = {
           model: "gpt-3.5-turbo-0613",
           stream: true,
-          messages: [
-            {
-              role: "system",
-              content: "Return 'Ready to go!'",
-            },
-          ],
-        },
+          messages: [{ role: "system", content: "Return 'Ready to go!'" }],
+        }`,
       },
     }),
     prisma.testScenario.create({
@@ -1,12 +1,11 @@
 import { z } from "zod";
 import { createTRPCRouter, publicProcedure } from "~/server/api/trpc";
 import { prisma } from "~/server/db";
-import { fillTemplateJson, type VariableMap } from "~/server/utils/fillTemplate";
-import { type JSONSerializable } from "~/server/types";
 import crypto from "crypto";
 import type { Prisma } from "@prisma/client";
 import { reevaluateVariant } from "~/server/utils/evaluations";
 import { getCompletion } from "~/server/utils/getCompletion";
+import { constructPrompt } from "~/server/utils/constructPrompt";

 export const modelOutputsRouter = createTRPCRouter({
   get: publicProcedure
@@ -44,15 +43,9 @@ export const modelOutputsRouter = createTRPCRouter({

       if (!variant || !scenario) return null;

-      const filledTemplate = fillTemplateJson(
-        variant.config as JSONSerializable,
-        scenario.variableValues as VariableMap,
-      );
+      const prompt = await constructPrompt(variant, scenario);

-      const inputHash = crypto
-        .createHash("sha256")
-        .update(JSON.stringify(filledTemplate))
-        .digest("hex");
+      const inputHash = crypto.createHash("sha256").update(JSON.stringify(prompt)).digest("hex");

       // TODO: we should probably only use this if temperature=0
       const existingResponse = await prisma.modelOutput.findFirst({
@@ -72,7 +65,7 @@ export const modelOutputsRouter = createTRPCRouter({
         };
       } else {
         try {
-          modelResponse = await getCompletion(filledTemplate, input.channel);
+          modelResponse = await getCompletion(prompt, input.channel);
         } catch (e) {
           console.error(e);
           throw e;
@@ -1,8 +1,6 @@
 import { z } from "zod";
 import { createTRPCRouter, publicProcedure } from "~/server/api/trpc";
 import { prisma } from "~/server/db";
-import { type OpenAIChatConfig } from "~/server/types";
-import { getModelName } from "~/server/utils/getModelName";
 import { recordExperimentUpdated } from "~/server/utils/recordExperimentUpdated";
 import { calculateTokenCost } from "~/utils/calculateTokenCost";

@@ -59,7 +57,9 @@ export const promptVariantsRouter = createTRPCRouter({
         },
       });

-      const model = getModelName(variant.config);
+      // TODO: fix this
+      const model = "gpt-3.5-turbo-0613";
+      // const model = getModelName(variant.config);

       const promptTokens = overallTokens._sum?.promptTokens ?? 0;
       const overallPromptCost = calculateTokenCost(model, promptTokens);
@@ -105,13 +105,13 @@ export const promptVariantsRouter = createTRPCRouter({
         experimentId: input.experimentId,
         label: `Prompt Variant ${largestSortIndex + 2}`,
         sortIndex: (lastVariant?.sortIndex ?? 0) + 1,
-        config: lastVariant?.config ?? {},
+        constructFn: lastVariant?.constructFn ?? "",
       },
     });

    const [newVariant] = await prisma.$transaction([
       createNewVariantAction,
-      recordExperimentUpdated(input.experimentId)
+      recordExperimentUpdated(input.experimentId),
     ]);

     return newVariant;
@@ -146,7 +146,7 @@ export const promptVariantsRouter = createTRPCRouter({

       const [updatedPromptVariant] = await prisma.$transaction([
         updatePromptVariantAction,
-        recordExperimentUpdated(existing.experimentId)
+        recordExperimentUpdated(existing.experimentId),
       ]);

       return updatedPromptVariant;
@@ -159,7 +159,7 @@ export const promptVariantsRouter = createTRPCRouter({
       }),
     )
     .mutation(async ({ input }) => {
-    const updatedPromptVariant = await prisma.promptVariant.update({
+      const updatedPromptVariant = await prisma.promptVariant.update({
       where: { id: input.id },
       data: { visible: false, experiment: { update: { updatedAt: new Date() } } },
     });
@@ -167,11 +167,11 @@ export const promptVariantsRouter = createTRPCRouter({
       return updatedPromptVariant;
     }),

-  replaceWithConfig: publicProcedure
+  replaceVariant: publicProcedure
     .input(
       z.object({
         id: z.string(),
-        config: z.string(),
+        constructFn: z.string(),
       }),
     )
     .mutation(async ({ input }) => {
@@ -181,13 +181,6 @@ export const promptVariantsRouter = createTRPCRouter({
         },
       });

-      let parsedConfig;
-      try {
-        parsedConfig = JSON.parse(input.config) as OpenAIChatConfig;
-      } catch (e) {
-        throw new Error(`Invalid JSON: ${(e as Error).message}`);
-      }
-
       if (!existing) {
         throw new Error(`Prompt Variant with id ${input.id} does not exist`);
       }
@@ -199,7 +192,7 @@ export const promptVariantsRouter = createTRPCRouter({
           label: existing.label,
           sortIndex: existing.sortIndex,
           uiId: existing.uiId,
-          config: parsedConfig,
+          constructFn: input.constructFn,
         },
       });

@@ -218,7 +211,7 @@ export const promptVariantsRouter = createTRPCRouter({

       await prisma.$transaction([
         hideOldVariantsAction,
-        recordExperimentUpdated(existing.experimentId)
+        recordExperimentUpdated(existing.experimentId),
       ]);

       return newVariant;
@@ -50,12 +50,12 @@ export const scenariosRouter = createTRPCRouter({

     await prisma.$transaction([
       createNewScenarioAction,
-      recordExperimentUpdated(input.experimentId)
+      recordExperimentUpdated(input.experimentId),
     ]);
   }),

   hide: publicProcedure.input(z.object({ id: z.string() })).mutation(async ({ input }) => {
-  const hiddenScenario = await prisma.testScenario.update({
+    const hiddenScenario = await prisma.testScenario.update({
       where: { id: input.id },
       data: { visible: false, experiment: { update: { updatedAt: new Date() } } },
     });
17  src/server/utils/constructPrompt.test.ts  Normal file
@@ -0,0 +1,17 @@
+import { test } from "vitest";
+import { constructPrompt } from "./constructPrompt";
+
+test.skip("constructPrompt", async () => {
+  const constructed = await constructPrompt(
+    {
+      constructFn: `prompt = { "fooz": "bar" }`,
+    },
+    {
+      variableValues: {
+        foo: "bar",
+      },
+    },
+  );
+
+  console.log(constructed);
+});
37  src/server/utils/constructPrompt.ts  Normal file
@@ -0,0 +1,37 @@
+import { type PromptVariant, type TestScenario } from "@prisma/client";
+import ivm from "isolated-vm";
+import { type JSONSerializable } from "../types";
+
+const isolate = new ivm.Isolate({ memoryLimit: 128 });
+
+export async function constructPrompt(
+  variant: Pick<PromptVariant, "constructFn">,
+  testScenario: Pick<TestScenario, "variableValues">,
+): Promise<JSONSerializable> {
+  const scenario = testScenario.variableValues as JSONSerializable;
+
+  const code = `
+    const scenario = ${JSON.stringify(scenario, null, 2)};
+    let prompt
+
+    ${variant.constructFn}
+
+    global.prompt = prompt;
+  `;
+
+  console.log("code is", code);
+
+  const context = await isolate.createContext();
+
+  const jail = context.global;
+  await jail.set("global", jail.derefInto());
+
+  const script = await isolate.compileScript(code);
+
+  await script.run(context);
+  const promptReference = (await context.global.get("prompt")) as ivm.Reference;
+
+  const prompt = await promptReference.copy(); // Get the actual value from the isolate
+
+  return prompt as JSONSerializable;
+}
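A hypothetical call, to make the data flow concrete (values are made up; the skipped test above exercises the same path): the scenario's `variableValues` are serialized into a `scenario` binding, the variant's `constructFn` runs against it inside the isolate, and the resulting `prompt` is copied back out as plain JSON.

const prompt = await constructPrompt(
  // constructFn can read the injected `scenario` object directly:
  { constructFn: `prompt = { model: "gpt-3.5-turbo-0613", n: scenario.n }` },
  { variableValues: { n: 2 } },
);
// => { model: "gpt-3.5-turbo-0613", n: 2 }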
@@ -13,7 +13,11 @@ export const reevaluateVariant = async (variantId: string) => {
   });

   const modelOutputs = await prisma.modelOutput.findMany({
-    where: { promptVariantId: variantId, statusCode: { notIn: [429] }, testScenario: { visible: true } },
+    where: {
+      promptVariantId: variantId,
+      statusCode: { notIn: [429] },
+      testScenario: { visible: true },
+    },
     include: { testScenario: true },
   });

@@ -96,4 +100,4 @@ export const reevaluateAll = async (experimentId: string) => {
   });

   await Promise.all(evaluations.map(reevaluateEvaluation));
-}
+};
@@ -8,8 +8,6 @@ import {
   type CompletionCreateParams,
 } from "openai/resources/chat";

-// console.log("creating openai client");
-
 export const openai = new OpenAI({ apiKey: env.OPENAI_API_KEY });

 export const mergeStreamedChunks = (
35  src/state/createSelectors.ts  Normal file
@@ -0,0 +1,35 @@
+import { type StoreApi, type UseBoundStore } from "zustand";
+
+type NestedSelectors<T> = {
+  [K in keyof T]: T[K] extends object
+    ? { [NestedK in keyof T[K]]: () => T[K][NestedK] }
+    : () => T[K];
+};
+
+type WithSelectors<S> = S extends { getState: () => infer T }
+  ? S & { use: NestedSelectors<T> }
+  : never;
+
+// Adapted from https://docs.pmnd.rs/zustand/guides/auto-generating-selectors
+
+/* eslint-disable */
+
+export const createSelectors = <S extends UseBoundStore<StoreApi<object>>>(_store: S) => {
+  const store = _store as WithSelectors<typeof _store>;
+  store.use = {};
+  for (const k of Object.keys(store.getState())) {
+    // @ts-expect-error black magic
+    const stateValue = store.getState()[k];
+    if (typeof stateValue === "object" && stateValue !== null) {
+      (store.use as any)[k] = {};
+      for (const nestedK of Object.keys(stateValue)) {
+        // @ts-expect-error black magic
+        (store.use as any)[k][nestedK] = () => store((s) => s[k][nestedK as keyof (typeof s)[k]]);
+      }
+    } else {
+      (store.use as any)[k] = () => store((s) => s[k as keyof typeof s]);
+    }
+  }
+
+  return store;
+};
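Usage sketch: because the generator walks one level of nesting, slice fields get hook-style selectors too. This is exactly how the VariantEditor component above subscribes to the Monaco instance:

// Subscribes only to state.variantEditor.monaco, not the whole store.
const monaco = useAppStore.use.variantEditor.monaco();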
35  src/state/store.ts  Normal file
@@ -0,0 +1,35 @@
+import { type StateCreator, create } from "zustand";
+import { immer } from "zustand/middleware/immer";
+import { createSelectors } from "./createSelectors";
+import { type VariantEditorSlice, createVariantEditorSlice } from "./variantEditor.slice";
+
+export type State = {
+  drawerOpen: boolean;
+  openDrawer: () => void;
+  closeDrawer: () => void;
+  variantEditor: VariantEditorSlice;
+};
+
+export type SliceCreator<T> = StateCreator<State, [["zustand/immer", never]], [], T>;
+
+export type SetFn = Parameters<SliceCreator<unknown>>[0];
+export type GetFn = Parameters<SliceCreator<unknown>>[1];
+
+const useBaseStore = create<State, [["zustand/immer", never]]>(
+  immer((set, get, ...rest) => ({
+    drawerOpen: false,
+    openDrawer: () =>
+      set((state) => {
+        state.drawerOpen = true;
+      }),
+    closeDrawer: () =>
+      set((state) => {
+        state.drawerOpen = false;
+      }),
+    variantEditor: createVariantEditorSlice(set, get, ...rest),
+  })),
+);
+
+export const useAppStore = createSelectors(useBaseStore);
+
+useAppStore.getState().variantEditor.loadMonaco().catch(console.error);
17  src/state/sync.ts  Normal file
@@ -0,0 +1,17 @@
+import { useEffect } from "react";
+import { api } from "~/utils/api";
+import { useExperiment } from "~/utils/hooks";
+import { useAppStore } from "./store";
+
+export function useSyncVariantEditor() {
+  const experiment = useExperiment();
+  const scenarios = api.scenarios.list.useQuery(
+    { experimentId: experiment.data?.id ?? "" },
+    { enabled: !!experiment.data?.id },
+  );
+  useEffect(() => {
+    if (scenarios.data) {
+      useAppStore.getState().variantEditor.setScenarios(scenarios.data);
+    }
+  }, [scenarios.data]);
+}
87  src/state/variantEditor.slice.ts  Normal file
@@ -0,0 +1,87 @@
+import { type RouterOutputs } from "~/utils/api";
+import { type SliceCreator } from "./store";
+import loader from "@monaco-editor/loader";
+import openAITypes from "~/codegen/openai.types.ts.txt";
+
+export type VariantEditorSlice = {
+  monaco: null | ReturnType<typeof loader.__getMonacoInstance>;
+  loadMonaco: () => Promise<void>;
+  scenarios: RouterOutputs["scenarios"]["list"];
+  updateScenariosModel: () => void;
+  setScenarios: (scenarios: RouterOutputs["scenarios"]["list"]) => void;
+};
+
+export const createVariantEditorSlice: SliceCreator<VariantEditorSlice> = (set, get) => ({
+  monaco: loader.__getMonacoInstance(),
+  loadMonaco: async () => {
+    const monaco = await loader.init();
+
+    monaco.editor.defineTheme("customTheme", {
+      base: "vs",
+      inherit: true,
+      rules: [],
+      colors: {
+        "editor.background": "#fafafa",
+      },
+    });
+
+    monaco.languages.typescript.typescriptDefaults.setCompilerOptions({
+      allowNonTsExtensions: true,
+      lib: ["esnext"],
+    });
+
+    monaco.editor.createModel(
+      `
+      ${openAITypes}
+
+      declare var prompt: components["schemas"]["CreateChatCompletionRequest"];
+      `,
+      "typescript",
+      monaco.Uri.parse("file:///openai.types.ts"),
+    );
+
+    set((state) => {
+      state.variantEditor.monaco = monaco;
+    });
+    get().variantEditor.updateScenariosModel();
+  },
+  scenarios: [],
+  // scenariosModel: null,
+  setScenarios: (scenarios) => {
+    set((state) => {
+      state.variantEditor.scenarios = scenarios;
+    });
+
+    get().variantEditor.updateScenariosModel();
+  },
+
+  updateScenariosModel: () => {
+    const monaco = get().variantEditor.monaco;
+    if (!monaco) return;
+
+    const modelContents = `
+    const scenarios = ${JSON.stringify(
+      get().variantEditor.scenarios.map((s) => s.variableValues),
+      null,
+      2,
+    )} as const;
+
+    type Scenario = typeof scenarios[number];
+    declare var scenario: Scenario | null;
+    `;
+
+    console.log(modelContents);
+
+    const scenariosModel = monaco.editor.getModel(monaco.Uri.parse("file:///scenarios.ts"));
+
+    if (scenariosModel) {
+      scenariosModel.setValue(modelContents);
+    } else {
+      monaco.editor.createModel(
+        modelContents,
+        "typescript",
+        monaco.Uri.parse("file:///scenarios.ts"),
+      );
+    }
+  },
+});
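So for a single scenario whose `variableValues` were, say, `{ "country": "France" }` (a made-up value), `updateScenariosModel` hands Monaco a model like the following, which is what gives the editor typed autocomplete for `scenario` inside a constructor function:

// Generated model contents (hypothetical scenario values):
const scenarios = [
  {
    "country": "France"
  }
] as const;

type Scenario = typeof scenarios[number];
declare var scenario: Scenario | null;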
6  src/types.d.ts  vendored  Normal file
@@ -0,0 +1,6 @@
+// Any import that ends in .txt should be treated as a string
+
+declare module "*.txt" {
+  const content: string;
+  export default content;
+}
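Combined with the raw-loader webpack rule added at the top of this commit, this declaration is what lets `variantEditor.slice.ts` import the generated definitions as a plain string:

import openAITypes from "~/codegen/openai.types.ts.txt";
// At runtime, openAITypes is the file's text, interpolated into a Monaco model.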
@@ -1,13 +0,0 @@
-import { create } from "zustand";
-
-type StoreState = {
-  drawerOpen: boolean;
-  openDrawer: () => void;
-  closeDrawer: () => void;
-};
-
-export const useStore = create<StoreState>()((set) => ({
-  drawerOpen: false,
-  openDrawer: () => set({ drawerOpen: true }),
-  closeDrawer: () => set({ drawerOpen: false }),
-}));