Compare commits
94 Commits
| Author | SHA1 | Date |
|---|---|---|
| | b2af83341d | |
| | e6d229d5f9 | |
| | 1a6ae3aef7 | |
| | 9051d80775 | |
| | 6c060c6ea0 | |
| | f70e73e338 | |
| | 16aa6672fc | |
| | ac99c8e0f7 | |
| | df121db78c | |
| | 816b41adad | |
| | f09dfe18be | |
| | 868d7084f0 | |
| | f6f04e537e | |
| | 4feb3e5829 | |
| | c62ced867a | |
| | 7bb414026e | |
| | 1b2b6c1456 | |
| | 760bfbbe32 | |
| | 3424aa36ba | |
| | ded86cba08 | |
| | 65a0f9065f | |
| | 2861c64428 | |
| | ca33bb0b08 | |
| | 72e680e77c | |
| | 5dd7e67396 | |
| | fd286f6874 | |
| | 7a4aa5f0aa | |
| | cb791e3c73 | |
| | a2c322ff43 | |
| | a2ace63f25 | |
| | 41d06596cb | |
| | 49c68fdbf2 | |
| | 6188f55569 | |
| | ea91d692d3 | |
| | ae7acbfdd4 | |
| | b9396e63cc | |
| | 753a48f6e9 | |
| | bd7c8b43b0 | |
| | a1249f17c9 | |
| | 6f8db40f74 | |
| | 8c5345a291 | |
| | f47010a6e7 | |
| | 6d32f1c06e | |
| | 8fed9730da | |
| | 0f9a83cf45 | |
| | 9f17d98736 | |
| | 74029e5478 | |
| | d220cd30e8 | |
| | c0f10cd522 | |
| | dc497dbd99 | |
| | f8f855adf4 | |
| | 8f49bace53 | |
| | c9f59bfb79 | |
| | 57166e96b4 | |
| | 1a838824ae | |
| | 6b304f8456 | |
| | a53d70d8b2 | |
| | 109a9ddb1e | |
| | 7f8b574c9f | |
| | 9e859c199e | |
| | deabbb094b | |
| | 7637b94ea7 | |
| | 721f1726eb | |
| | cfeb4dfa92 | |
| | 21ef67ed4c | |
| | 7707d451e0 | |
| | d82782adb4 | |
| | e10589abff | |
| | 01dcbfc896 | |
| | 50e0b34d30 | |
| | 44bb9fc58d | |
| | c0d3784f0c | |
| | e522026b71 | |
| | 46b13d85b7 | |
| | c12aa82a3e | |
| | b98bce8944 | |
| | f045c80dfd | |
| | 3b460dff2a | |
| | 5fa5732804 | |
| | 28e6e2b9df | |
| | 54d1df4442 | |
| | f69c2b5f23 | |
| | 51f0666f6a | |
| | b67d974f4c | |
| | 33fb2db981 | |
| | e391379c3e | |
| | 8d1609dd52 | |
| | f3380f302d | |
| | 3dba9c7ee1 | |
| | e0e4f7a9d6 | |
| | 48293dc579 | |
| | 38ac6243a0 | |
| | bd2f58e2a5 | |
| | 808e47c6b9 | |
@@ -6,6 +6,10 @@ on:
  push:
    branches: [main]

defaults:
  run:
    working-directory: app

jobs:
  run-checks:
    runs-on: ubuntu-latest

README.md (77 changes)

@@ -1,8 +1,12 @@
<img src="https://github.com/openpipe/openpipe/assets/41524992/ca59596e-eb80-40f9-921f-6d67f6e6d8fa" width="72px" />
<!-- <img src="https://github.com/openpipe/openpipe/assets/41524992/ca59596e-eb80-40f9-921f-6d67f6e6d8fa" width="72px" /> -->

# OpenPipe

OpenPipe is a flexible playground for comparing and optimizing LLM prompts. It lets you quickly generate, test and compare candidate prompts with realistic sample data.
OpenPipe is a flexible playground for comparing and optimizing LLM prompts. It lets you quickly generate, test and compare candidate prompts, and can automatically [translate](#-translate-between-model-apis) those prompts between models.

<img src="https://github.com/openpipe/openpipe/assets/41524992/219a844e-3f4e-4f6b-8066-41348b42977b" alt="demo">

You can use our hosted version of OpenPipe at https://openpipe.ai. You can also clone this repository and [run it locally](#running-locally).

## Sample Experiments

@@ -13,47 +17,46 @@ These are simple experiments users have created that show how OpenPipe works. Fe
- [OpenAI Function Calls](https://app.openpipe.ai/experiments/2ebbdcb3-ed51-456e-87dc-91f72eaf3e2b)
- [Activity Classification](https://app.openpipe.ai/experiments/3950940f-ab6b-4b74-841d-7e9dbc4e4ff8)

<img src="https://github.com/openpipe/openpipe/assets/41524992/219a844e-3f4e-4f6b-8066-41348b42977b" alt="demo">

You can use our hosted version of OpenPipe at https://openpipe.ai. You can also clone this repository and [run it locally](#running-locally).

## High-Level Features

**Visualize Responses**
Inspect prompt completions side-by-side.

<br>

**Test Many Inputs**
OpenPipe lets you _template_ a prompt. Use the templating feature to run the prompts you're testing against many potential inputs for broad coverage of your problem space.

<br>

**Translate between Model APIs**
Write your prompt in one format and automatically convert it to work with any other model.

<img width="480" alt="Screenshot 2023-08-01 at 11 55 38 PM" src="https://github.com/OpenPipe/OpenPipe/assets/41524992/1e19ccf2-96b6-4e93-a3a5-1449710d1b5b" alt="translate between models">

<br><br>
**Refine your prompts automatically**
Use a growing database of best-practice refinements to improve your prompts automatically.

<img width="480" alt="Screenshot 2023-08-01 at 11 55 38 PM" src="https://github.com/OpenPipe/OpenPipe/assets/41524992/87a27fe7-daef-445c-a5e2-1c82b23f9f99" alt="add function call">

<br><br>
**🪄 Auto-generate Test Scenarios**
OpenPipe includes a tool to generate new test scenarios based on your existing prompts and scenarios. Just click "Autogenerate Scenario" to try it out!

<img width="600" src="https://github.com/openpipe/openpipe/assets/41524992/219a844e-3f4e-4f6b-8066-41348b42977b" alt="auto-generate">

<br><br>

## Supported Models

- All models available through the OpenAI [chat completion API](https://platform.openai.com/docs/guides/gpt/chat-completions-api)
- Llama2 [7b chat](https://replicate.com/a16z-infra/llama7b-v2-chat), [13b chat](https://replicate.com/a16z-infra/llama13b-v2-chat), [70b chat](https://replicate.com/replicate/llama70b-v2-chat).
- Anthropic's [Claude 1 Instant](https://www.anthropic.com/index/introducing-claude) and [Claude 2](https://www.anthropic.com/index/claude-2)

## Features

### 🔍 Visualize Responses

Inspect prompt completions side-by-side.

### 🧪 Bulk-Test

OpenPipe lets you _template_ a prompt. Use the templating feature to run the prompts you're testing against many potential inputs for broad coverage of your problem space.

### 📟 Translate between Model APIs

Write your prompt in one format and automatically convert it to work with any other model.

<img width="480" alt="Screenshot 2023-08-01 at 11 55 38 PM" src="https://github.com/OpenPipe/OpenPipe/assets/41524992/1e19ccf2-96b6-4e93-a3a5-1449710d1b5b" alt="translate between models">

<br><br>

### 🛠️ Refine Your Prompts Automatically

Use a growing database of best-practice refinements to improve your prompts automatically.

<img width="480" alt="Screenshot 2023-08-01 at 11 55 38 PM" src="https://github.com/OpenPipe/OpenPipe/assets/41524992/87a27fe7-daef-445c-a5e2-1c82b23f9f99" alt="add function call">

<br><br>

### 🪄 Auto-generate Test Scenarios

OpenPipe includes a tool to generate new test scenarios based on your existing prompts and scenarios. Just click "Autogenerate Scenario" to try it out!

<img width="600" src="https://github.com/openpipe/openpipe/assets/41524992/219a844e-3f4e-4f6b-8066-41348b42977b" alt="auto-generate">

<br><br>

## Running Locally

1. Install [Postgresql](https://www.postgresql.org/download/).
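For context on the "Bulk-Test" and "Translate between Model APIs" features described in the README hunks above, here is a minimal sketch of a templated prompt constructor in the style of the `definePrompt("openai/ChatCompletion", ...)` calls that appear in the seed scripts later in this diff. The `scenario.country` variable and the sandbox-provided globals are assumptions for illustration only; this is not code from the changeset.

```ts
// Hypothetical prompt constructor, mirroring the definePrompt() calls in the seed
// scripts below. `definePrompt` and `scenario` are assumed to be injected by
// OpenPipe's prompt-constructor sandbox; `scenario.country` is a placeholder variable.
declare const scenario: { country: string };
declare function definePrompt(provider: "openai/ChatCompletion", config: unknown): void;

definePrompt("openai/ChatCompletion", {
  model: "gpt-3.5-turbo-0613",
  messages: [
    {
      role: "user",
      // One templated prompt can be bulk-tested against many scenarios.
      content: `What is the capital of ${scenario.country}? Respond with one word.`,
    },
  ],
});
```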
@@ -26,6 +26,11 @@ NEXT_PUBLIC_SOCKET_URL="http://localhost:3318"
NEXTAUTH_SECRET="your_secret"
NEXTAUTH_URL="http://localhost:3000"

NEXT_PUBLIC_HOST="http://localhost:3000"

# Next Auth Github Provider
GITHUB_CLIENT_ID="your_client_id"
GITHUB_CLIENT_SECRET="your_secret"

OPENPIPE_BASE_URL="http://localhost:3000/api"
OPENPIPE_API_KEY="your_key"
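The hunk above adds NEXT_PUBLIC_HOST, OPENPIPE_BASE_URL, and OPENPIPE_API_KEY to the example environment file. Elsewhere in this changeset the app reads configuration through `~/env.mjs` and depends on `@t3-oss/env-nextjs`, so these variables would presumably also be declared in that schema. The actual `src/env.mjs` is not part of this diff; the following is a hedged sketch only, and the use of `zod` here is an assumption.

```ts
// Sketch only: how the new variables might be declared with @t3-oss/env-nextjs.
// The real src/env.mjs is not shown in this diff; existing entries are omitted.
import { createEnv } from "@t3-oss/env-nextjs";
import { z } from "zod";

export const env = createEnv({
  server: {
    OPENPIPE_BASE_URL: z.string().url().optional(),
    OPENPIPE_API_KEY: z.string().optional(),
  },
  client: {
    NEXT_PUBLIC_HOST: z.string().url(),
  },
  runtimeEnv: {
    OPENPIPE_BASE_URL: process.env.OPENPIPE_BASE_URL,
    OPENPIPE_API_KEY: process.env.OPENPIPE_API_KEY,
    NEXT_PUBLIC_HOST: process.env.NEXT_PUBLIC_HOST,
  },
});
```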
.gitignore → app/.gitignore (3 changes, vendored)

@@ -40,3 +40,6 @@ yarn-error.log*

# typescript
*.tsbuildinfo

# Sentry Auth Token
.sentryclirc

@@ -12,12 +12,20 @@ declare module "nextjs-routes" {

  export type Route =
    | StaticRoute<"/account/signin">
    | DynamicRoute<"/api/[...trpc]", { "trpc": string[] }>
    | DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
    | StaticRoute<"/api/experiments/og-image">
    | StaticRoute<"/api/openapi">
    | StaticRoute<"/api/sentry-example-api">
    | DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>
    | DynamicRoute<"/data/[id]", { "id": string }>
    | StaticRoute<"/data">
    | DynamicRoute<"/experiments/[id]", { "id": string }>
    | StaticRoute<"/experiments">
    | StaticRoute<"/">
    | StaticRoute<"/logged-calls">
    | StaticRoute<"/project/settings">
    | StaticRoute<"/sentry-example-page">
    | StaticRoute<"/world-champs">
    | StaticRoute<"/world-champs/signup">;

@@ -20,6 +20,10 @@ FROM base as builder
# Include all NEXT_PUBLIC_* env vars here
ARG NEXT_PUBLIC_POSTHOG_KEY
ARG NEXT_PUBLIC_SOCKET_URL
ARG NEXT_PUBLIC_HOST
ARG NEXT_PUBLIC_SENTRY_DSN
ARG SENTRY_AUTH_TOKEN
ARG NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS

WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules

app/next.config.mjs (61 changes, Normal file)

@@ -0,0 +1,61 @@
import nextRoutes from "nextjs-routes/config";
import { withSentryConfig } from "@sentry/nextjs";

/**
 * Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation. This is especially useful
 * for Docker builds.
 */
const { env } = await import("./src/env.mjs");

/** @type {import("next").NextConfig} */
let config = {
  reactStrictMode: true,

  /**
   * If you have `experimental: { appDir: true }` set, then you must comment the below `i18n` config
   * out.
   *
   * @see https://github.com/vercel/next.js/issues/41980
   */
  i18n: {
    locales: ["en"],
    defaultLocale: "en",
  },

  rewrites: async () => [
    {
      source: "/ingest/:path*",
      destination: "https://app.posthog.com/:path*",
    },
  ],

  webpack: (config) => {
    config.module.rules.push({
      test: /\.txt$/,
      use: "raw-loader",
    });
    return config;
  },
};

config = nextRoutes()(config);

if (env.NEXT_PUBLIC_SENTRY_DSN && env.SENTRY_AUTH_TOKEN) {
  // @ts-expect-error - `withSentryConfig` is not typed correctly
  config = withSentryConfig(
    config,
    {
      authToken: env.SENTRY_AUTH_TOKEN,
      silent: true,
      org: "openpipe",
      project: "openpipe",
    },
    {
      widenClientFileUpload: true,
      tunnelRoute: "/monitoring",
      disableLogger: true,
    },
  );
}

export default config;

app/openapitools.json (7 changes, Normal file)

@@ -0,0 +1,7 @@
{
  "$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json",
  "spaces": 2,
  "generator-cli": {
    "version": "6.6.0"
  }
}

@@ -16,9 +16,10 @@
    "postinstall": "prisma generate",
    "lint": "next lint",
    "start": "next start",
    "codegen": "tsx src/codegen/export-openai-types.ts",
    "codegen": "tsx src/server/scripts/client-codegen.ts",
    "seed": "tsx prisma/seed.ts",
    "check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'"
    "check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'",
    "test": "pnpm vitest --no-threads"
  },
  "dependencies": {
    "@anthropic-ai/sdk": "^0.5.8",
@@ -36,6 +37,7 @@
    "@monaco-editor/loader": "^1.3.3",
    "@next-auth/prisma-adapter": "^1.0.5",
    "@prisma/client": "^4.14.0",
    "@sentry/nextjs": "^7.61.0",
    "@t3-oss/env-nextjs": "^0.3.1",
    "@tabler/icons-react": "^2.22.0",
    "@tanstack/react-query": "^4.29.7",
@@ -48,6 +50,7 @@
    "chroma-js": "^2.4.2",
    "concurrently": "^8.2.0",
    "cors": "^2.8.5",
    "crypto-random-string": "^5.0.0",
    "dayjs": "^1.11.8",
    "dedent": "^1.0.1",
    "dotenv": "^16.3.1",
@@ -60,14 +63,19 @@
    "json-schema-to-typescript": "^13.0.2",
    "json-stringify-pretty-compact": "^4.0.0",
    "jsonschema": "^1.4.1",
    "kysely": "^0.26.1",
    "lodash-es": "^4.17.21",
    "lucide-react": "^0.265.0",
    "next": "^13.4.2",
    "next-auth": "^4.22.1",
    "next-query-params": "^4.2.3",
    "nextjs-cors": "^2.1.2",
    "nextjs-routes": "^2.0.1",
    "openai": "4.0.0-beta.7",
    "pg": "^8.11.2",
    "pluralize": "^8.0.0",
    "posthog-js": "^1.68.4",
    "posthog-js": "^1.75.3",
    "posthog-node": "^3.1.1",
    "prettier": "^3.0.0",
    "prismjs": "^1.29.0",
    "react": "18.2.0",
@@ -80,10 +88,12 @@
    "react-syntax-highlighter": "^15.5.0",
    "react-textarea-autosize": "^8.5.0",
    "recast": "^0.23.3",
    "recharts": "^2.7.2",
    "replicate": "^0.12.3",
    "socket.io": "^4.7.1",
    "socket.io-client": "^4.7.1",
    "superjson": "1.12.2",
    "trpc-openapi": "^1.2.0",
    "tsx": "^3.12.7",
    "type-fest": "^4.0.0",
    "use-query-params": "^2.2.1",
@@ -103,6 +113,7 @@
    "@types/json-schema": "^7.0.12",
    "@types/lodash-es": "^4.17.8",
    "@types/node": "^18.16.0",
    "@types/pg": "^8.10.2",
    "@types/pluralize": "^0.0.30",
    "@types/prismjs": "^1.26.0",
    "@types/react": "^18.2.6",
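The scripts hunk above adds a `test` script that runs Vitest (`pnpm vitest --no-threads`). No test files appear in this excerpt, so purely as a hedged illustration, a minimal spec of the kind that script would pick up might look like this; the file name and tested behavior are placeholders.

```ts
// app/src/example.test.ts - hypothetical spec, not part of this changeset.
import { describe, it, expect } from "vitest";

describe("example", () => {
  it("adds numbers", () => {
    expect(1 + 2).toBe(3);
  });
});
```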
pnpm-lock.yaml → app/pnpm-lock.yaml (1387 changes, generated)

@@ -0,0 +1,5 @@
-- CreateEnum
CREATE TYPE "UserRole" AS ENUM ('ADMIN', 'USER');

-- AlterTable
ALTER TABLE "User" ADD COLUMN "role" "UserRole" NOT NULL DEFAULT 'USER';

@@ -0,0 +1,28 @@
-- CreateTable
CREATE TABLE "Dataset" (
    "id" UUID NOT NULL,
    "name" TEXT NOT NULL,
    "organizationId" UUID NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "Dataset_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "DatasetEntry" (
    "id" UUID NOT NULL,
    "input" TEXT NOT NULL,
    "output" TEXT,
    "datasetId" UUID NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "DatasetEntry_pkey" PRIMARY KEY ("id")
);

-- AddForeignKey
ALTER TABLE "Dataset" ADD CONSTRAINT "Dataset_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "DatasetEntry" ADD CONSTRAINT "DatasetEntry_datasetId_fkey" FOREIGN KEY ("datasetId") REFERENCES "Dataset"("id") ON DELETE CASCADE ON UPDATE CASCADE;
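As a hedged sketch of how the new Dataset and DatasetEntry tables might be used through the Prisma client generated from the schema later in this diff; the project ID and entry contents are placeholders, and this code is not part of the changeset.

```ts
// Sketch only: create a Dataset with two DatasetEntry rows via the generated client.
import { prisma } from "~/server/db";

const dataset = await prisma.dataset.create({
  data: {
    name: "Capital cities",
    project: { connect: { id: "11111111-1111-1111-1111-111111111111" } }, // placeholder project
    datasetEntries: {
      create: [
        { input: "What is the capital of France?", output: "Paris" },
        { input: "What is the capital of Japan?", output: "Tokyo" },
      ],
    },
  },
  include: { datasetEntries: true },
});

console.log(`Created dataset ${dataset.id} with ${dataset.datasetEntries.length} entries`);
```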
@@ -0,0 +1,13 @@
/*
  Warnings:

  - You are about to drop the column `constructFn` on the `PromptVariant` table. All the data in the column will be lost.
  - You are about to drop the column `constructFnVersion` on the `PromptVariant` table. All the data in the column will be lost.
  - Added the required column `promptConstructor` to the `PromptVariant` table without a default value. This is not possible if the table is not empty.
  - Added the required column `promptConstructorVersion` to the `PromptVariant` table without a default value. This is not possible if the table is not empty.

*/
-- AlterTable

ALTER TABLE "PromptVariant" RENAME COLUMN "constructFn" TO "promptConstructor";
ALTER TABLE "PromptVariant" RENAME COLUMN "constructFnVersion" TO "promptConstructorVersion";

@@ -0,0 +1,90 @@
-- CreateTable
CREATE TABLE "LoggedCall" (
    "id" UUID NOT NULL,
    "startTime" TIMESTAMP(3) NOT NULL,
    "cacheHit" BOOLEAN NOT NULL,
    "modelResponseId" UUID NOT NULL,
    "organizationId" UUID NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "LoggedCall_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "LoggedCallModelResponse" (
    "id" UUID NOT NULL,
    "reqPayload" JSONB NOT NULL,
    "respStatus" INTEGER,
    "respPayload" JSONB,
    "error" TEXT,
    "startTime" TIMESTAMP(3) NOT NULL,
    "endTime" TIMESTAMP(3) NOT NULL,
    "cacheKey" TEXT,
    "durationMs" INTEGER,
    "inputTokens" INTEGER,
    "outputTokens" INTEGER,
    "finishReason" TEXT,
    "completionId" TEXT,
    "totalCost" DECIMAL(18,12),
    "originalLoggedCallId" UUID NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "LoggedCallModelResponse_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "LoggedCallTag" (
    "id" UUID NOT NULL,
    "name" TEXT NOT NULL,
    "value" TEXT,
    "loggedCallId" UUID NOT NULL,

    CONSTRAINT "LoggedCallTag_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "ApiKey" (
    "id" UUID NOT NULL,
    "name" TEXT NOT NULL,
    "apiKey" TEXT NOT NULL,
    "organizationId" UUID NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "ApiKey_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "LoggedCall_startTime_idx" ON "LoggedCall"("startTime");

-- CreateIndex
CREATE UNIQUE INDEX "LoggedCallModelResponse_originalLoggedCallId_key" ON "LoggedCallModelResponse"("originalLoggedCallId");

-- CreateIndex
CREATE INDEX "LoggedCallModelResponse_cacheKey_idx" ON "LoggedCallModelResponse"("cacheKey");

-- CreateIndex
CREATE INDEX "LoggedCallTag_name_idx" ON "LoggedCallTag"("name");

-- CreateIndex
CREATE INDEX "LoggedCallTag_name_value_idx" ON "LoggedCallTag"("name", "value");

-- CreateIndex
CREATE UNIQUE INDEX "ApiKey_apiKey_key" ON "ApiKey"("apiKey");

-- AddForeignKey
ALTER TABLE "LoggedCall" ADD CONSTRAINT "LoggedCall_modelResponseId_fkey" FOREIGN KEY ("modelResponseId") REFERENCES "LoggedCallModelResponse"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "LoggedCall" ADD CONSTRAINT "LoggedCall_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "LoggedCallModelResponse" ADD CONSTRAINT "LoggedCallModelResponse_originalLoggedCallId_fkey" FOREIGN KEY ("originalLoggedCallId") REFERENCES "LoggedCall"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "LoggedCallTag" ADD CONSTRAINT "LoggedCallTag_loggedCallId_fkey" FOREIGN KEY ("loggedCallId") REFERENCES "LoggedCall"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "ApiKey" ADD CONSTRAINT "ApiKey_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
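A hedged sketch of what inserting a row into these new logging tables might look like through the Prisma client generated from the models later in this diff. The payloads and project ID are placeholders, and the real logging code path is not shown in this excerpt.

```ts
// Sketch only: record a cache-miss call, its model response, and a tag, using the
// LoggedCall / LoggedCallModelResponse / LoggedCallTag models created above.
import { prisma } from "~/server/db";

const startTime = new Date();

await prisma.loggedCall.create({
  data: {
    project: { connect: { id: "11111111-1111-1111-1111-111111111111" } }, // placeholder project
    startTime,
    cacheHit: false,
    createdResponses: {
      create: {
        reqPayload: { model: "gpt-3.5-turbo-0613", messages: [] }, // placeholder payload
        respStatus: 200,
        respPayload: { choices: [] }, // placeholder payload
        startTime,
        endTime: new Date(),
      },
    },
    tags: {
      create: [{ name: "environment", value: "development" }],
    },
  },
});
```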
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Organization" ADD COLUMN "name" TEXT NOT NULL DEFAULT 'Project 1';

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "LoggedCall" ALTER COLUMN "modelResponseId" DROP NOT NULL;

@@ -0,0 +1,37 @@
-- Rename Enum
ALTER TYPE "OrganizationUserRole" RENAME TO "ProjectUserRole";

-- Drop and recreate foreign keys
ALTER TABLE "ApiKey" DROP CONSTRAINT "ApiKey_organizationId_fkey";
ALTER TABLE "Dataset" DROP CONSTRAINT "Dataset_organizationId_fkey";
ALTER TABLE "Experiment" DROP CONSTRAINT "Experiment_organizationId_fkey";
ALTER TABLE "LoggedCall" DROP CONSTRAINT "LoggedCall_organizationId_fkey";
ALTER TABLE "OrganizationUser" DROP CONSTRAINT "OrganizationUser_organizationId_fkey";
ALTER TABLE "OrganizationUser" DROP CONSTRAINT "OrganizationUser_userId_fkey";

-- Rename columns
ALTER TABLE "ApiKey" RENAME COLUMN "organizationId" TO "projectId";
ALTER TABLE "Dataset" RENAME COLUMN "organizationId" TO "projectId";
ALTER TABLE "Experiment" RENAME COLUMN "organizationId" TO "projectId";
ALTER TABLE "LoggedCall" RENAME COLUMN "organizationId" TO "projectId";
ALTER TABLE "OrganizationUser" RENAME COLUMN "organizationId" TO "projectId";
ALTER TABLE "Organization" RENAME COLUMN "personalOrgUserId" TO "personalProjectUserId";

-- Rename table
ALTER TABLE "Organization" RENAME TO "Project";
ALTER TABLE "OrganizationUser" RENAME TO "ProjectUser";

-- Recreate foreign keys
ALTER TABLE "Experiment" ADD CONSTRAINT "Experiment_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
ALTER TABLE "Dataset" ADD CONSTRAINT "Dataset_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
ALTER TABLE "ProjectUser" ADD CONSTRAINT "ProjectUser_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
ALTER TABLE "ProjectUser" ADD CONSTRAINT "ProjectUser_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
ALTER TABLE "LoggedCall" ADD CONSTRAINT "LoggedCall_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
ALTER TABLE "ApiKey" ADD CONSTRAINT "ApiKey_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- Rename indexes
ALTER TABLE "Project" RENAME CONSTRAINT "Organization_pkey" TO "Project_pkey";
ALTER TABLE "ProjectUser" RENAME CONSTRAINT "OrganizationUser_pkey" TO "ProjectUser_pkey";
ALTER TABLE "Project" RENAME CONSTRAINT "Organization_personalOrgUserId_fkey" TO "Project_personalProjectUserId_fkey";
ALTER INDEX "Organization_personalOrgUserId_key" RENAME TO "Project_personalProjectUserId_key";
ALTER INDEX "OrganizationUser_organizationId_userId_key" RENAME TO "ProjectUser_projectId_userId_key";

@@ -16,8 +16,8 @@ model Experiment {

  sortIndex Int @default(0)

  organizationId String @db.Uuid
  organization Organization? @relation(fields: [organizationId], references: [id], onDelete: Cascade)
  projectId String @db.Uuid
  project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt

@@ -31,11 +31,11 @@ model Experiment {
model PromptVariant {
  id String @id @default(uuid()) @db.Uuid

  label String
  constructFn String
  constructFnVersion Int
  model String
  modelProvider String
  label String
  promptConstructor String
  promptConstructorVersion Int
  model String
  modelProvider String

  uiId String @default(uuid()) @db.Uuid
  visible Boolean @default(true)

@@ -174,30 +174,61 @@ model OutputEvaluation {
  @@unique([modelResponseId, evaluationId])
}

model Organization {
  id String @id @default(uuid()) @db.Uuid
  personalOrgUserId String? @unique @db.Uuid
  PersonalOrgUser User? @relation(fields: [personalOrgUserId], references: [id], onDelete: Cascade)
model Dataset {
  id String @id @default(uuid()) @db.Uuid

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  organizationUsers OrganizationUser[]
  experiments Experiment[]
  name String
  datasetEntries DatasetEntry[]

  projectId String @db.Uuid
  project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

enum OrganizationUserRole {
model DatasetEntry {
  id String @id @default(uuid()) @db.Uuid

  input String
  output String?

  datasetId String @db.Uuid
  dataset Dataset? @relation(fields: [datasetId], references: [id], onDelete: Cascade)

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Project {
  id String @id @default(uuid()) @db.Uuid
  name String @default("Project 1")

  personalProjectUserId String? @unique @db.Uuid
  personalProjectUser User? @relation(fields: [personalProjectUserId], references: [id], onDelete: Cascade)

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  projectUsers ProjectUser[]
  experiments Experiment[]
  datasets Dataset[]
  loggedCalls LoggedCall[]
  apiKeys ApiKey[]
}

enum ProjectUserRole {
  ADMIN
  MEMBER
  VIEWER
}

model OrganizationUser {
model ProjectUser {
  id String @id @default(uuid()) @db.Uuid

  role OrganizationUserRole
  role ProjectUserRole

  organizationId String @db.Uuid
  organization Organization? @relation(fields: [organizationId], references: [id], onDelete: Cascade)
  projectId String @db.Uuid
  project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)

  userId String @db.Uuid
  user User @relation(fields: [userId], references: [id], onDelete: Cascade)

@@ -205,7 +236,7 @@ model OrganizationUser {
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt

  @@unique([organizationId, userId])
  @@unique([projectId, userId])
}

model WorldChampEntrant {

@@ -222,6 +253,99 @@ model WorldChampEntrant {
  @@unique([userId])
}

model LoggedCall {
  id String @id @default(uuid()) @db.Uuid

  startTime DateTime

  // True if this call was served from the cache, false otherwise
  cacheHit Boolean

  // A LoggedCall is always associated with a LoggedCallModelResponse. If this
  // is a cache miss, we create a new LoggedCallModelResponse.
  // If it's a cache hit, it's a pre-existing LoggedCallModelResponse.
  modelResponseId String? @db.Uuid
  modelResponse LoggedCallModelResponse? @relation(fields: [modelResponseId], references: [id], onDelete: Cascade)

  // The responses created by this LoggedCall. Will be empty if this LoggedCall was a cache hit.
  createdResponses LoggedCallModelResponse[] @relation(name: "ModelResponseOriginalCall")

  projectId String @db.Uuid
  project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)

  tags LoggedCallTag[]

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt

  @@index([startTime])
}

model LoggedCallModelResponse {
  id String @id @default(uuid()) @db.Uuid

  reqPayload Json

  // The HTTP status returned by the model provider
  respStatus Int?
  respPayload Json?

  // Should be null if the request was successful, and some string if the request failed.
  error String?

  startTime DateTime
  endTime DateTime

  // Note: the function to calculate the cacheKey should include the project
  // ID so we don't share cached responses between projects, which could be an
  // attack vector. Also, we should only set the cacheKey on the model if the
  // request was successful.
  cacheKey String?

  // Derived fields
  durationMs Int?
  inputTokens Int?
  outputTokens Int?
  finishReason String?
  completionId String?
  totalCost Decimal? @db.Decimal(18, 12)

  // The LoggedCall that created this LoggedCallModelResponse
  originalLoggedCallId String @unique @db.Uuid
  originalLoggedCall LoggedCall @relation(name: "ModelResponseOriginalCall", fields: [originalLoggedCallId], references: [id], onDelete: Cascade)

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  loggedCalls LoggedCall[]

  @@index([cacheKey])
}
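The schema comment on `cacheKey` above explains the intended derivation: include the project ID so cached responses are never shared across projects, and only set the key when the provider request succeeded. The actual helper is not part of this excerpt; a minimal sketch under those constraints might look like this, with the function name and call site assumed.

```ts
// Hypothetical cache-key helper, not part of this changeset: hash the project ID
// together with the request payload so identical requests from different projects
// never collide, per the schema comment above.
import { createHash } from "crypto";

export function deriveCacheKey(projectId: string, reqPayload: unknown): string {
  return createHash("sha256")
    .update(projectId)
    .update(JSON.stringify(reqPayload))
    .digest("hex");
}

// Only persist the key when the provider call succeeded, e.g.:
// if (respStatus === 200) cacheKey = deriveCacheKey(projectId, reqPayload);
```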
model LoggedCallTag {
  id String @id @default(uuid()) @db.Uuid
  name String
  value String?

  loggedCallId String @db.Uuid
  loggedCall LoggedCall @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)

  @@index([name])
  @@index([name, value])
}

model ApiKey {
  id String @id @default(uuid()) @db.Uuid

  name String
  apiKey String @unique

  projectId String @db.Uuid
  project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Account {
  id String @id @default(uuid()) @db.Uuid
  userId String @db.Uuid

@@ -249,16 +373,24 @@ model Session {
  user User @relation(fields: [userId], references: [id], onDelete: Cascade)
}

enum UserRole {
  ADMIN
  USER
}

model User {
  id String @id @default(uuid()) @db.Uuid
  name String?
  email String? @unique
  emailVerified DateTime?
  image String?
  id String @id @default(uuid()) @db.Uuid
  name String?
  email String? @unique
  emailVerified DateTime?
  image String?

  role UserRole @default(USER)

  accounts Account[]
  sessions Session[]
  organizationUsers OrganizationUser[]
  organizations Organization[]
  projectUsers ProjectUser[]
  projects Project[]
  worldChampEntrant WorldChampEntrant?

  createdAt DateTime @default(now())

@@ -1,17 +1,18 @@
import { prisma } from "~/server/db";
import dedent from "dedent";
import { generateNewCell } from "~/server/utils/generateNewCell";
import { promptConstructorVersion } from "~/promptConstructor/version";

const defaultId = "11111111-1111-1111-1111-111111111111";

await prisma.organization.deleteMany({
await prisma.project.deleteMany({
  where: { id: defaultId },
});

// If there's an existing org, just seed into it
const org =
  (await prisma.organization.findFirst({})) ??
  (await prisma.organization.create({
// If there's an existing project, just seed into it
const project =
  (await prisma.project.findFirst({})) ??
  (await prisma.project.create({
    data: { id: defaultId },
  }));

@@ -25,7 +26,7 @@ await prisma.experiment.create({
  data: {
    id: defaultId,
    label: "Country Capitals Example",
    organizationId: org.id,
    projectId: project.id,
  },
});

@@ -51,8 +52,8 @@ await prisma.promptVariant.createMany({
      sortIndex: 0,
      model: "gpt-3.5-turbo-0613",
      modelProvider: "openai/ChatCompletion",
      constructFnVersion: 1,
      constructFn: dedent`
      promptConstructorVersion,
      promptConstructor: dedent`
        definePrompt("openai/ChatCompletion", {
          model: "gpt-3.5-turbo-0613",
          messages: [
@@ -70,8 +71,8 @@ await prisma.promptVariant.createMany({
      sortIndex: 1,
      model: "gpt-3.5-turbo-0613",
      modelProvider: "openai/ChatCompletion",
      constructFnVersion: 1,
      constructFn: dedent`
      promptConstructorVersion,
      promptConstructor: dedent`
        definePrompt("openai/ChatCompletion", {
          model: "gpt-3.5-turbo-0613",
          messages: [

@@ -3,17 +3,18 @@ import { generateNewCell } from "~/server/utils/generateNewCell";
import dedent from "dedent";
import { execSync } from "child_process";
import fs from "fs";
import { promptConstructorVersion } from "~/promptConstructor/version";

const defaultId = "11111111-1111-1111-1111-111111111112";

await prisma.organization.deleteMany({
await prisma.project.deleteMany({
  where: { id: defaultId },
});

// If there's an existing org, just seed into it
const org =
  (await prisma.organization.findFirst({})) ??
  (await prisma.organization.create({
// If there's an existing project, just seed into it
const project =
  (await prisma.project.findFirst({})) ??
  (await prisma.project.create({
    data: { id: defaultId },
  }));

@@ -46,7 +47,7 @@ for (const dataset of datasets) {
  const oldExperiment = await prisma.experiment.findFirst({
    where: {
      label: experimentName,
      organizationId: org.id,
      projectId: project.id,
    },
  });
  if (oldExperiment) {
@@ -59,7 +60,7 @@ for (const dataset of datasets) {
    data: {
      id: oldExperiment?.id ?? undefined,
      label: experimentName,
      organizationId: org.id,
      projectId: project.id,
    },
  });

@@ -98,8 +99,8 @@ for (const dataset of datasets) {
      sortIndex: 0,
      model: "gpt-3.5-turbo-0613",
      modelProvider: "openai/ChatCompletion",
      constructFnVersion: 1,
      constructFn: dedent`
      promptConstructorVersion,
      promptConstructor: dedent`
        definePrompt("openai/ChatCompletion", {
          model: "gpt-3.5-turbo-0613",
          messages: [

app/prisma/seedDashboard.ts (410 changes, Normal file)

@@ -2,17 +2,18 @@ import { prisma } from "~/server/db";
import dedent from "dedent";
import fs from "fs";
import { parse } from "csv-parse/sync";
import { promptConstructorVersion } from "~/promptConstructor/version";

const defaultId = "11111111-1111-1111-1111-111111111112";

await prisma.organization.deleteMany({
await prisma.project.deleteMany({
  where: { id: defaultId },
});

// If there's an existing org, just seed into it
const org =
  (await prisma.organization.findFirst({})) ??
  (await prisma.organization.create({
// If there's an existing project, just seed into it
const project =
  (await prisma.project.findFirst({})) ??
  (await prisma.project.create({
    data: { id: defaultId },
  }));

@@ -26,7 +27,7 @@ const experimentName = `Twitter Sentiment Analysis`;
const oldExperiment = await prisma.experiment.findFirst({
  where: {
    label: experimentName,
    organizationId: org.id,
    projectId: project.id,
  },
});
if (oldExperiment) {
@@ -39,7 +40,7 @@ const experiment = await prisma.experiment.create({
  data: {
    id: oldExperiment?.id ?? undefined,
    label: experimentName,
    organizationId: org.id,
    projectId: project.id,
  },
});

@@ -85,8 +86,8 @@ await prisma.promptVariant.createMany({
      sortIndex: 0,
      model: "gpt-3.5-turbo-0613",
      modelProvider: "openai/ChatCompletion",
      constructFnVersion: 1,
      constructFn: dedent`
      promptConstructorVersion,
      promptConstructor: dedent`
        definePrompt("openai/ChatCompletion", {
          model: "gpt-3.5-turbo-0613",
          messages: [
[11 image assets relocated in this changeset; file names were not captured in this export. Before and after sizes are identical for each file (ranging from 704 B to 62 KiB), consistent with moves without content changes.]
@@ -5,6 +5,9 @@ set -e
echo "Migrating the database"
pnpm prisma migrate deploy

echo "Migrating promptConstructors"
pnpm tsx src/promptConstructor/migrate.ts

echo "Starting the server"

pnpm concurrently --kill-others \

app/sentry.client.config.ts (33 changes, Normal file)

@@ -0,0 +1,33 @@
// This file configures the initialization of Sentry on the client.
// The config you add here will be used whenever a users loads a page in their browser.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/

import * as Sentry from "@sentry/nextjs";
import { env } from "~/env.mjs";

if (env.NEXT_PUBLIC_SENTRY_DSN) {
  Sentry.init({
    dsn: env.NEXT_PUBLIC_SENTRY_DSN,

    // Adjust this value in production, or use tracesSampler for greater control
    tracesSampleRate: 1,

    // Setting this option to true will print useful information to the console while you're setting up Sentry.
    debug: false,

    replaysOnErrorSampleRate: 1.0,

    // This sets the sample rate to be 10%. You may want this to be 100% while
    // in development and sample at a lower rate in production
    replaysSessionSampleRate: 0.1,

    // You can remove this option if you're not planning to use the Sentry Session Replay feature:
    integrations: [
      new Sentry.Replay({
        // Additional Replay configuration goes in here, for example:
        maskAllText: true,
        blockAllMedia: true,
      }),
    ],
  });
}

app/sentry.edge.config.ts (19 changes, Normal file)

@@ -0,0 +1,19 @@
// This file configures the initialization of Sentry for edge features (middleware, edge routes, and so on).
// The config you add here will be used whenever one of the edge features is loaded.
// Note that this config is unrelated to the Vercel Edge Runtime and is also required when running locally.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/

import * as Sentry from "@sentry/nextjs";
import { env } from "~/env.mjs";

if (env.NEXT_PUBLIC_SENTRY_DSN) {
  Sentry.init({
    dsn: env.NEXT_PUBLIC_SENTRY_DSN,

    // Adjust this value in production, or use tracesSampler for greater control
    tracesSampleRate: 1,

    // Setting this option to true will print useful information to the console while you're setting up Sentry.
    debug: false,
  });
}

app/sentry.server.config.ts (18 changes, Normal file)

@@ -0,0 +1,18 @@
// This file configures the initialization of Sentry on the server.
// The config you add here will be used whenever the server handles a request.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/

import * as Sentry from "@sentry/nextjs";
import { env } from "~/env.mjs";

if (env.NEXT_PUBLIC_SENTRY_DSN) {
  Sentry.init({
    dsn: env.NEXT_PUBLIC_SENTRY_DSN,

    // Adjust this value in production, or use tracesSampler for greater control
    tracesSampleRate: 1,

    // Setting this option to true will print useful information to the console while you're setting up Sentry.
    debug: false,
  });
}

@@ -68,7 +68,7 @@ export const ChangeModelModal = ({
      return;
    await replaceVariantMutation.mutateAsync({
      id: variant.id,
      constructFn: modifiedPromptFn,
      promptConstructor: modifiedPromptFn,
      streamScenarios: visibleScenarios,
    });
    await utils.promptVariants.list.invalidate();
@@ -107,7 +107,7 @@ export const ChangeModelModal = ({
        <ModelSearch selectedModel={selectedModel} setSelectedModel={setSelectedModel} />
        {isString(modifiedPromptFn) && (
          <CompareFunctions
            originalFunction={variant.constructFn}
            originalFunction={variant.promptConstructor}
            newFunction={modifiedPromptFn}
            leftTitle={originalLabel}
            rightTitle={convertedLabel}

app/src/components/CopiableCode.tsx (40 changes, Normal file)

@@ -0,0 +1,40 @@
import { HStack, Icon, IconButton, Tooltip, Text } from "@chakra-ui/react";
import { useState } from "react";
import { MdContentCopy } from "react-icons/md";
import { useHandledAsyncCallback } from "~/utils/hooks";

const CopiableCode = ({ code }: { code: string }) => {
  const [copied, setCopied] = useState(false);

  const [copyToClipboard] = useHandledAsyncCallback(async () => {
    await navigator.clipboard.writeText(code);
    setCopied(true);
  }, [code]);
  return (
    <HStack
      backgroundColor="blackAlpha.800"
      color="white"
      borderRadius={4}
      padding={3}
      w="full"
      justifyContent="space-between"
    >
      <Text fontFamily="inconsolata" fontWeight="bold" letterSpacing={0.5}>
        {code}
      </Text>
      <Tooltip closeOnClick={false} label={copied ? "Copied!" : "Copy to clipboard"}>
        <IconButton
          aria-label="Copy"
          icon={<Icon as={MdContentCopy} boxSize={5} />}
          size="xs"
          colorScheme="white"
          variant="ghost"
          onClick={copyToClipboard}
          onMouseLeave={() => setCopied(false)}
        />
      </Tooltip>
    </HStack>
  );
};

export default CopiableCode;

@@ -1,18 +1,29 @@
import { Button, Spinner, InputGroup, InputRightElement, Icon, HStack } from "@chakra-ui/react";
import {
  Button,
  Spinner,
  InputGroup,
  InputRightElement,
  Icon,
  HStack,
  type InputGroupProps,
} from "@chakra-ui/react";
import { IoMdSend } from "react-icons/io";
import AutoResizeTextArea from "../AutoResizeTextArea";
import AutoResizeTextArea from "./AutoResizeTextArea";

export const CustomInstructionsInput = ({
  instructions,
  setInstructions,
  loading,
  onSubmit,
  placeholder = "Send custom instructions",
  ...props
}: {
  instructions: string;
  setInstructions: (instructions: string) => void;
  loading: boolean;
  onSubmit: () => void;
}) => {
  placeholder?: string;
} & InputGroupProps) => {
  return (
    <InputGroup
      size="md"
@@ -22,6 +33,7 @@ export const CustomInstructionsInput = ({
      borderRadius={8}
      alignItems="center"
      colorScheme="orange"
      {...props}
    >
      <AutoResizeTextArea
        value={instructions}
@@ -33,7 +45,7 @@ export const CustomInstructionsInput = ({
            onSubmit();
          }
        }}
        placeholder="Send custom instructions"
        placeholder={placeholder}
        py={4}
        pl={4}
        pr={12}
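The hunks above generalize CustomInstructionsInput: it now takes an optional `placeholder`, spreads any extra `InputGroupProps` onto the underlying InputGroup, and imports AutoResizeTextArea from its new location. A hedged usage sketch of the new signature follows; the import path, component name, and handler are assumptions, only the props come from the diff.

```tsx
// Sketch only: calling the updated component with the new optional props.
import { useState } from "react";
import { CustomInstructionsInput } from "~/components/CustomInstructionsInput"; // assumed path

export function AutogenerateScenarioBox() {
  const [instructions, setInstructions] = useState("");

  return (
    <CustomInstructionsInput
      instructions={instructions}
      setInstructions={setInstructions}
      loading={false}
      onSubmit={() => console.log("submit", instructions)}
      placeholder="Describe the scenario you want to generate" // new optional prop
      mt={4} // extra InputGroupProps now pass through via {...props}
    />
  );
}
```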
@@ -14,6 +14,7 @@ import {
import { useRouter } from "next/router";
import { useRef } from "react";
import { BsTrash } from "react-icons/bs";
import { useAppStore } from "~/state/store";
import { api } from "~/utils/api";
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";

@@ -23,6 +24,8 @@ export const DeleteButton = () => {
  const utils = api.useContext();
  const router = useRouter();

  const closeDrawer = useAppStore((s) => s.closeDrawer);

  const { isOpen, onOpen, onClose } = useDisclosure();
  const cancelRef = useRef<HTMLButtonElement>(null);

@@ -31,6 +34,8 @@ export const DeleteButton = () => {
    await mutation.mutateAsync({ id: experiment.data.id });
    await utils.experiments.list.invalidate();
    await router.push({ pathname: "/experiments" });
    closeDrawer();

    onClose();
  }, [mutation, experiment.data?.id, router]);

@@ -88,7 +88,7 @@ export default function OutputCell({
        )}
      </VStack>
    ),
    [hardRefetching, hardRefetch, mostRecentResponse, scenario],
    [hardRefetching, hardRefetch, mostRecentResponse, scenario, cell],
  );

  if (!vars) return null;
@@ -191,7 +191,7 @@ export default function OutputCell({

  return (
    <CellWrapper>
      <Text>{contentToDisplay}</Text>
      <Text whiteSpace="pre-wrap">{contentToDisplay}</Text>
    </CellWrapper>
  );
}

@@ -23,8 +23,15 @@ export const OutputStats = ({
  const completionTokens = modelResponse.completionTokens;

  return (
    <HStack w="full" align="center" color="gray.500" fontSize="2xs" mt={{ base: 0, md: 1 }}>
      <HStack flex={1}>
    <HStack
      w="full"
      align="center"
      color="gray.500"
      fontSize="2xs"
      mt={{ base: 0, md: 1 }}
      alignItems="flex-end"
    >
      <HStack flex={1} flexWrap="wrap">
        {modelResponse.outputEvaluations.map((evaluation) => {
          const passed = evaluation.result > 0.5;
          return (

@@ -18,13 +18,13 @@ export const CellOptions = ({
  const modalDisclosure = useDisclosure();

  return (
    <HStack justifyContent="flex-end" w="full">
    <HStack justifyContent="flex-end" w="full" spacing={1}>
      {cell && (
        <>
          <Tooltip label="See Prompt">
            <IconButton
              aria-label="See Prompt"
              icon={<Icon as={BsInfoCircle} boxSize={4} />}
              icon={<Icon as={BsInfoCircle} boxSize={3.5} />}
              onClick={modalDisclosure.onOpen}
              size="xs"
              colorScheme="gray"

app/src/components/OutputsTable/ScenarioPaginator.tsx (21 changes, Normal file)

@@ -0,0 +1,21 @@
import { useScenarios } from "~/utils/hooks";
import Paginator from "../Paginator";

const ScenarioPaginator = () => {
  const { data } = useScenarios();

  if (!data) return null;

  const { scenarios, startIndex, lastPage, count } = data;

  return (
    <Paginator
      numItemsLoaded={scenarios.length}
      startIndex={startIndex}
      lastPage={lastPage}
      count={count}
    />
  );
};

export default ScenarioPaginator;

@@ -47,7 +47,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
    return () => window.removeEventListener("keydown", handleEsc);
  }, [isFullscreen, toggleFullscreen]);

  const lastSavedFn = props.variant.constructFn;
  const lastSavedFn = props.variant.promptConstructor;

  const modifierKey = useModifierKeyLabel();

@@ -96,7 +96,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {

    const resp = await replaceVariant.mutateAsync({
      id: props.variant.id,
      constructFn: currentFn,
      promptConstructor: currentFn,
      streamScenarios: visibleScenarios,
    });
    if (resp.status === "error") {

@@ -43,12 +43,12 @@ export default function VariantStats(props: { variant: PromptVariant }) {
  return (
    <HStack
      justifyContent="space-between"
      alignItems="center"
      alignItems="flex-end"
      mx="2"
      fontSize="xs"
      py={cellPadding.y}
    >
      <HStack px={cellPadding.x}>
      <HStack px={cellPadding.x} flexWrap="wrap">
        {showNumFinished && (
          <Text>
            {data.outputCount} / {data.scenarioCount}

@@ -35,7 +35,7 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
      pb={24}
      pl={8}
      display="grid"
      gridTemplateColumns={`250px repeat(${variants.data.length}, minmax(360px, 1fr)) auto`}
      gridTemplateColumns={`250px repeat(${variants.data.length}, minmax(320px, 1fr)) auto`}
      sx={{
        "> *": {
          borderColor: "gray.300",

@@ -5,15 +5,20 @@ import {
  BsChevronLeft,
  BsChevronRight,
} from "react-icons/bs";
import { usePage, useScenarios } from "~/utils/hooks";
import { usePage } from "~/utils/hooks";

const ScenarioPaginator = () => {
const Paginator = ({
  numItemsLoaded,
  startIndex,
  lastPage,
  count,
}: {
  numItemsLoaded: number;
  startIndex: number;
  lastPage: number;
  count: number;
}) => {
  const [page, setPage] = usePage();
  const { data } = useScenarios();

  if (!data) return null;

  const { scenarios, startIndex, lastPage, count } = data;

  const nextPage = () => {
    if (page < lastPage) {
@@ -49,7 +54,7 @@ const ScenarioPaginator = () => {
        icon={<BsChevronLeft />}
      />
      <Box>
        {startIndex}-{startIndex + scenarios.length - 1} / {count}
        {startIndex}-{startIndex + numItemsLoaded - 1} / {count}
      </Box>
      <IconButton
        variant="ghost"
@@ -71,4 +76,4 @@ const ScenarioPaginator = () => {
  );
};

export default ScenarioPaginator;
export default Paginator;

@@ -20,7 +20,7 @@ import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
import { type PromptVariant } from "@prisma/client";
import { useState } from "react";
import CompareFunctions from "./CompareFunctions";
import { CustomInstructionsInput } from "./CustomInstructionsInput";
import { CustomInstructionsInput } from "../CustomInstructionsInput";
import { RefineAction } from "./RefineAction";
import { isObject, isString } from "lodash-es";
import { type RefinementAction, type SupportedProvider } from "~/modelProviders/types";
@@ -73,7 +73,7 @@ export const RefinePromptModal = ({
      return;
    await replaceVariantMutation.mutateAsync({
      id: variant.id,
      constructFn: refinedPromptFn,
      promptConstructor: refinedPromptFn,
      streamScenarios: visibleScenarios,
    });
    await utils.promptVariants.list.invalidate();
@@ -122,11 +122,11 @@ export const RefinePromptModal = ({
              instructions={instructions}
              setInstructions={setInstructions}
              loading={modificationInProgress}
              onSubmit={getModifiedPromptFn}
              onSubmit={() => getModifiedPromptFn()}
            />
          </VStack>
          <CompareFunctions
            originalFunction={variant.constructFn}
            originalFunction={variant.promptConstructor}
            newFunction={isString(refinedPromptFn) ? refinedPromptFn : undefined}
            maxH="40vh"
          />

app/src/components/StatsCard.tsx (26 changes, Normal file)

@@ -0,0 +1,26 @@
import { VStack, HStack, type StackProps, Text, Divider } from "@chakra-ui/react";
import Link, { type LinkProps } from "next/link";

const StatsCard = ({
  title,
  href,
  children,
  ...rest
}: { title: string; href: string } & StackProps & LinkProps) => {
  return (
    <VStack flex={1} borderWidth={1} padding={4} borderRadius={4} borderColor="gray.300" {...rest}>
      <HStack w="full" justifyContent="space-between">
        <Text fontSize="md" fontWeight="bold">
          {title}
        </Text>
        <Link href={href}>
          <Text color="blue">View all</Text>
        </Link>
      </HStack>
      <Divider />
      {children}
    </VStack>
  );
};

export default StatsCard;
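A hedged usage sketch for the new StatsCard component above. The page component, metric, and extra styling prop are placeholders; only the `title`/`href` props, the pass-through StackProps, and the `/logged-calls` route (which appears in the generated routes file earlier in this diff) come from the changeset.

```tsx
// Sketch only: rendering a StatsCard on a hypothetical dashboard page.
import { Text } from "@chakra-ui/react";
import StatsCard from "~/components/StatsCard"; // assumed path

export function DashboardStats() {
  return (
    <StatsCard title="Logged Calls" href="/logged-calls" minH="200px">
      <Text fontSize="2xl">1,284</Text> {/* placeholder metric */}
    </StatsCard>
  );
}
```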