Compare commits


44 Commits

Author SHA1 Message Date
Kyle Corbitt
d82782adb4 Number experiments based only on current org
Previously we were naming each new experiment based on the highest existing sort index globally, which doesn't make sense. Better to just use the local one (see the sketch after the commit list).
2023-08-05 09:26:55 -07:00
Kyle Corbitt
e10589abff Rename constructFn to promptConstructor
It's a clearer name. Also reorganize the filesystem so all the promptConstructor related files are colocated.
2023-08-04 23:09:39 -07:00
Kyle Corbitt
01dcbfc896 Rename 'anthropic' to 'anthropic/completion' (#120)
More consistency in the way we name our model providers.
2023-08-04 22:07:23 -07:00
Kyle Corbitt
50e0b34d30 newer replicate models 2023-08-04 21:18:52 -07:00
arcticfly
44bb9fc58d Add outputs to entry generation (#119) 2023-08-04 16:14:49 -07:00
David Corbitt
c0d3784f0c Merge branch 'main' of github.com:corbt/prompt-lab 2023-08-04 16:06:45 -07:00
David Corbitt
e522026b71 Embold star 2023-08-04 16:06:34 -07:00
arcticfly
46b13d85b7 Update README.md 2023-08-04 12:00:38 -07:00
arcticfly
c12aa82a3e Update README.md 2023-08-04 11:58:47 -07:00
arcticfly
b98bce8944 Add Datasets (#118)
* Add dataset (without entries)

* Fix dataset hook

* Add dataset rows

* Add buttons to import/generate data

* Add GenerateDataModal

* Autogenerate and save data

* Fix prettier

* Fix types

* Add dataset pagination

* Fix prettier

* Use useDisclosure

* Allow generate data modal fadeaway

* hide/show data in env var

* Fix prettier
2023-08-04 11:52:03 -07:00
arcticfly
f045c80dfd Update README.md 2023-08-03 18:31:24 -07:00
arcticfly
3b460dff2a Update README.md 2023-08-03 18:16:54 -07:00
David Corbitt
5fa5732804 Move demo up 2023-08-03 12:02:10 -07:00
arcticfly
28e6e2b9df Wrap evals (#117)
* Wrap eval outputs

* Fix prettier

* Decrease variant minWidth
2023-08-03 11:58:39 -07:00
Kyle Corbitt
54d1df4442 upload sourcemaps 2023-08-03 11:53:13 -07:00
David Corbitt
f69c2b5f23 Fix prettier 2023-08-03 11:48:05 -07:00
David Corbitt
51f0666f6a Add table of contents to README 2023-08-03 11:40:29 -07:00
Kyle Corbitt
b67d974f4c Merge pull request #116 from OpenPipe/sentry
Add Sentry
2023-08-03 10:23:22 -07:00
Kyle Corbitt
33fb2db981 Add Sentry
Visibility into errors in prod
2023-08-03 10:18:17 -07:00
Kyle Corbitt
e391379c3e Merge pull request #115 from OpenPipe/admin
Add admin role
2023-08-03 09:39:00 -07:00
Kyle Corbitt
8d1609dd52 Add admin role
Allow privileged users to administer the system.
2023-08-03 09:35:13 -07:00
David Corbitt
f3380f302d Simplify world champs screen 2023-08-02 23:57:44 -07:00
David Corbitt
3dba9c7ee1 Update posthog version 2023-08-02 23:30:15 -07:00
David Corbitt
e0e4f7a9d6 Fix mobile table padding 2023-08-02 23:08:49 -07:00
arcticfly
48293dc579 Add link to demo experiment (#114) 2023-08-02 22:50:09 -07:00
arcticfly
38ac6243a0 Add server posthog events (#113) 2023-08-02 14:21:07 -07:00
arcticfly
bd2f58e2a5 Improve posthog (#112)
* Add SessionIdentifier

* Identify by id

* Rewrite posthog events

* Add NEXT_PUBLIC_HOST to dockerfile

* Fix default url

* Move SessionIdentifier into analytics file
2023-08-02 13:30:25 -07:00
Kyle Corbitt
808e47c6b9 Merge pull request #111 from OpenPipe/gh-btn
Update TopNavbar component to include a GitHub button
2023-08-02 10:15:26 -07:00
Kyle Corbitt
5945f0ed6b Update TopNavbar component to include a GitHub button 2023-08-02 10:11:41 -07:00
arcticfly
6bc7d76d15 Update README.md 2023-08-02 00:59:05 -07:00
arcticfly
e9ed173e34 Update README.md 2023-08-02 00:57:24 -07:00
arcticfly
75d58d7021 Update README.md 2023-08-02 00:56:19 -07:00
arcticfly
896c8c5c57 Update README.md 2023-08-02 00:51:57 -07:00
arcticfly
ec5547d0b0 Update README.md with new features and gifs (#110) 2023-08-02 00:46:48 -07:00
Kyle Corbitt
77e4e3b8c3 mobile styles 2023-08-01 23:08:35 -07:00
Kyle Corbitt
a1b03ddad1 Merge pull request #109 from OpenPipe/debug-prompts
Add debug modal for output cells
2023-08-01 22:51:39 -07:00
Kyle Corbitt
6be32bea4c Add debug modal for output cells
See the actual input that a model got for a specific cell. The formatting isn't great right now; should probably iterate on that.
2023-08-01 22:49:38 -07:00
arcticfly
72c70e2a55 Improve conversion to/from Claude (#108)
* Increase min width of prompt variant

* Increase width of custom instructions input

* Start recording API docs

* Provide better instructions for converting to/from Claude

* Fix prettier
2023-08-01 21:03:23 -07:00
arcticfly
026532f2c2 Model selection styling changes (#107)
* Model selection styling changes

* Fix prettier
2023-08-01 18:45:15 -07:00
Kyle Corbitt
f88538336f fix types 2023-08-01 18:31:34 -07:00
Kyle Corbitt
3c7178115e Merge pull request #105 from OpenPipe/bump-models
Bump Replicate models
2023-08-01 18:26:16 -07:00
Kyle Corbitt
292aaf090a Merge pull request #106 from OpenPipe/dark-mode
Update global background color in ChakraThemeProvider
2023-08-01 18:25:57 -07:00
Kyle Corbitt
d9915dc41b Update global background color in ChakraThemeProvider 2023-08-01 18:25:29 -07:00
David Corbitt
3560bcff14 Correct time stamps on waiting message 2023-08-01 18:09:23 -07:00
101 changed files with 2630 additions and 758 deletions
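The first commit above ("Number experiments based only on current org") replaces a global lookup with an org-scoped one. A minimal sketch of that kind of scoped query, assuming the Experiment model carries `sortIndex` and `organizationId` fields like the other models in this diff:

import { prisma } from "~/server/db";

// Find the highest sortIndex within the current org only, then number the
// new experiment relative to that, not relative to the global maximum.
async function nextExperimentIndex(organizationId: string) {
  const { _max } = await prisma.experiment.aggregate({
    where: { organizationId },
    _max: { sortIndex: true },
  });
  return (_max.sortIndex ?? 0) + 1;
}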


@@ -26,6 +26,8 @@ NEXT_PUBLIC_SOCKET_URL="http://localhost:3318"
NEXTAUTH_SECRET="your_secret"
NEXTAUTH_URL="http://localhost:3000"
NEXT_PUBLIC_HOST="http://localhost:3000"
# Next Auth Github Provider
GITHUB_CLIENT_ID="your_client_id"
GITHUB_CLIENT_SECRET="your_secret"
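These two variables feed the NextAuth GitHub provider. The repo's auth options file isn't part of this diff, so the following is only a sketch of the usual wiring:

import GithubProvider from "next-auth/providers/github";
import { env } from "~/env.mjs";

// Typical NextAuth provider setup consuming the env vars above.
export const providers = [
  GithubProvider({
    clientId: env.GITHUB_CLIENT_ID,
    clientSecret: env.GITHUB_CLIENT_SECRET,
  }),
];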

.gitignore

@@ -40,3 +40,6 @@ yarn-error.log*
# typescript
*.tsbuildinfo
# Sentry Auth Token
.sentryclirc


@@ -14,10 +14,14 @@ declare module "nextjs-routes" {
| StaticRoute<"/account/signin">
| DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
| StaticRoute<"/api/experiments/og-image">
| StaticRoute<"/api/sentry-example-api">
| DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>
| DynamicRoute<"/data/[id]", { "id": string }>
| StaticRoute<"/data">
| DynamicRoute<"/experiments/[id]", { "id": string }>
| StaticRoute<"/experiments">
| StaticRoute<"/">
| StaticRoute<"/sentry-example-page">
| StaticRoute<"/world-champs">
| StaticRoute<"/world-champs/signup">;


@@ -20,6 +20,9 @@ FROM base as builder
# Include all NEXT_PUBLIC_* env vars here
ARG NEXT_PUBLIC_POSTHOG_KEY
ARG NEXT_PUBLIC_SOCKET_URL
ARG NEXT_PUBLIC_HOST
ARG NEXT_PUBLIC_SENTRY_DSN
ARG SENTRY_AUTH_TOKEN
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules


@@ -1,52 +1,62 @@
<img src="https://github.com/openpipe/openpipe/assets/41524992/ca59596e-eb80-40f9-921f-6d67f6e6d8fa" width="72px" />
<!-- <img src="https://github.com/openpipe/openpipe/assets/41524992/ca59596e-eb80-40f9-921f-6d67f6e6d8fa" width="72px" /> -->
# OpenPipe
OpenPipe is a flexible playground for comparing and optimizing LLM prompts. It lets you quickly generate, test and compare candidate prompts with realistic sample data.
OpenPipe is a flexible playground for comparing and optimizing LLM prompts. It lets you quickly generate, test and compare candidate prompts, and can automatically [translate](#-translate-between-model-apis) those prompts between models.
<img src="https://github.com/openpipe/openpipe/assets/41524992/219a844e-3f4e-4f6b-8066-41348b42977b" alt="demo">
You can use our hosted version of OpenPipe at https://openpipe.ai. You can also clone this repository and [run it locally](#running-locally).
## Sample Experiments
These are simple experiments users have created that show how OpenPipe works.
These are simple experiments users have created that show how OpenPipe works. Feel free to fork them and start experimenting yourself.
- [Country Capitals](https://app.openpipe.ai/experiments/11111111-1111-1111-1111-111111111111)
- [Twitter Sentiment Analysis](https://app.openpipe.ai/experiments/62c20a73-2012-4a64-973c-4b665ad46a57)
- [Reddit User Needs](https://app.openpipe.ai/experiments/22222222-2222-2222-2222-222222222222)
- [OpenAI Function Calls](https://app.openpipe.ai/experiments/2ebbdcb3-ed51-456e-87dc-91f72eaf3e2b)
- [Activity Classification](https://app.openpipe.ai/experiments/3950940f-ab6b-4b74-841d-7e9dbc4e4ff8)
<img src="https://github.com/openpipe/openpipe/assets/176426/fc7624c6-5b65-4d4d-82b7-4a816f3e5678" alt="demo" height="400px">
You can use our hosted version of OpenPipe at https://openpipe.ai. You can also clone this repository and [run it locally](#running-locally).
## High-Level Features
**Configure Multiple Prompts**
Set up multiple prompt configurations and compare their outputs side-by-side. Each configuration can be adjusted independently.
**Visualize Responses**
Inspect prompt completions side-by-side.
**Test Many Inputs**
OpenPipe lets you _template_ a prompt. Use the templating feature to run the prompts you're testing against many potential inputs for broader coverage of your problem space than you'd get with manual testing.
**🪄 Auto-generate Test Scenarios**
OpenPipe includes a tool to generate new test scenarios based on your existing prompts and scenarios. Just click "Autogenerate Scenario" to try it out!
**Prompt Validation and Typeahead**
We use OpenAI's OpenAPI spec to automatically provide typeahead and validate prompts.
<img alt="typeahead" src="https://github.com/openpipe/openpipe/assets/176426/acc638f8-d851-4742-8d01-fe6f98890840" height="300px">
**Function Call Support**
Natively supports [OpenAI function calls](https://openai.com/blog/function-calling-and-other-api-updates) on supported models.
<img height="300px" alt="function calls" src="https://github.com/openpipe/openpipe/assets/176426/48ad13fe-af2f-4294-bf32-62015597fd9b">
## Supported Models
- All models available through the OpenAI [chat completion API](https://platform.openai.com/docs/guides/gpt/chat-completions-api)
- Llama2 [7b chat](https://replicate.com/a16z-infra/llama7b-v2-chat), [13b chat](https://replicate.com/a16z-infra/llama13b-v2-chat), [70b chat](https://replicate.com/replicate/llama70b-v2-chat).
- Anthropic's [Claude 1 Instant](https://www.anthropic.com/index/introducing-claude) and [Claude 2](https://www.anthropic.com/index/claude-2)
## Features
### 🔍 Visualize Responses
Inspect prompt completions side-by-side.
### 🧪 Bulk-Test
OpenPipe lets you _template_ a prompt. Use the templating feature to run the prompts you're testing against many potential inputs for broad coverage of your problem space.
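For instance, a templated prompt constructor can reference a scenario variable, so one template fans out into a completion per scenario. A sketch in the `definePrompt` style used by the seed scripts later in this diff (the `scenario.input` variable name follows the sample API docs):

definePrompt("openai/ChatCompletion", {
  model: "gpt-3.5-turbo-0613",
  messages: [
    {
      role: "user",
      // `scenario.input` is substituted separately for every test scenario.
      content: `Classify the sentiment of this tweet: ${scenario.input}`,
    },
  ],
});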
### 📟 Translate between Model APIs
Write your prompt in one format and automatically convert it to work with any other model.
<img width="480" alt="Screenshot 2023-08-01 at 11 55 38 PM" src="https://github.com/OpenPipe/OpenPipe/assets/41524992/1e19ccf2-96b6-4e93-a3a5-1449710d1b5b" alt="translate between models">
<br><br>
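Concretely, the conversion is between prompt shapes like these two (a hand-written sketch; the Anthropic string format comes from the `anthropic/completion` schema included later in this diff):

// OpenAI chat format: a structured message array.
const openAiBody = {
  model: "gpt-3.5-turbo-0613",
  messages: [{ role: "user", content: "What is the capital of France?" }],
};

// Equivalent anthropic/completion format: one string that starts with
// "\n\nHuman:" and ends with "\n\nAssistant:".
const anthropicBody = {
  model: "claude-2",
  prompt: "\n\nHuman: What is the capital of France?\n\nAssistant:",
  max_tokens_to_sample: 256,
};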
### 🛠️ Refine Your Prompts Automatically
Use a growing database of best-practice refinements to improve your prompts automatically.
<img width="480" alt="Screenshot 2023-08-01 at 11 55 38 PM" src="https://github.com/OpenPipe/OpenPipe/assets/41524992/87a27fe7-daef-445c-a5e2-1c82b23f9f99" alt="add function call">
<br><br>
### 🪄 Auto-generate Test Scenarios
OpenPipe includes a tool to generate new test scenarios based on your existing prompts and scenarios. Just click "Autogenerate Scenario" to try it out!
<img width="600" src="https://github.com/openpipe/openpipe/assets/41524992/219a844e-3f4e-4f6b-8066-41348b42977b" alt="auto-generate">
<br><br>
## Running Locally
1. Install [Postgresql](https://www.postgresql.org/download/).


@@ -1,13 +1,14 @@
import nextRoutes from "nextjs-routes/config";
import { withSentryConfig } from "@sentry/nextjs";
/**
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation. This is especially useful
* for Docker builds.
*/
await import("./src/env.mjs");
const { env } = await import("./src/env.mjs");
/** @type {import("next").NextConfig} */
const config = {
let config = {
reactStrictMode: true,
/**
@@ -21,6 +22,13 @@ const config = {
defaultLocale: "en",
},
rewrites: async () => [
{
source: "/ingest/:path*",
destination: "https://app.posthog.com/:path*",
},
],
webpack: (config) => {
config.module.rules.push({
test: /\.txt$/,
@@ -30,4 +38,24 @@ const config = {
},
};
export default nextRoutes()(config);
config = nextRoutes()(config);
if (env.NEXT_PUBLIC_SENTRY_DSN && env.SENTRY_AUTH_TOKEN) {
// @ts-expect-error - `withSentryConfig` is not typed correctly
config = withSentryConfig(
config,
{
authToken: env.SENTRY_AUTH_TOKEN,
silent: true,
org: "openpipe",
project: "openpipe",
},
{
widenClientFileUpload: true,
tunnelRoute: "/monitoring",
disableLogger: true,
},
);
}
export default config;


@@ -18,15 +18,18 @@
"start": "next start",
"codegen": "tsx src/codegen/export-openai-types.ts",
"seed": "tsx prisma/seed.ts",
"check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'"
"check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'",
"test": "pnpm vitest --no-threads"
},
"dependencies": {
"@anthropic-ai/sdk": "^0.5.8",
"@apidevtools/json-schema-ref-parser": "^10.1.0",
"@babel/preset-typescript": "^7.22.5",
"@babel/standalone": "^7.22.9",
"@chakra-ui/anatomy": "^2.2.0",
"@chakra-ui/next-js": "^2.1.4",
"@chakra-ui/react": "^2.7.1",
"@chakra-ui/styled-system": "^2.9.1",
"@emotion/react": "^11.11.1",
"@emotion/server": "^11.11.0",
"@emotion/styled": "^11.11.0",
@@ -34,6 +37,7 @@
"@monaco-editor/loader": "^1.3.3",
"@next-auth/prisma-adapter": "^1.0.5",
"@prisma/client": "^4.14.0",
"@sentry/nextjs": "^7.61.0",
"@t3-oss/env-nextjs": "^0.3.1",
"@tabler/icons-react": "^2.22.0",
"@tanstack/react-query": "^4.29.7",
@@ -63,15 +67,18 @@
"next-auth": "^4.22.1",
"next-query-params": "^4.2.3",
"nextjs-routes": "^2.0.1",
"openai": "4.0.0-beta.2",
"openai": "4.0.0-beta.7",
"pluralize": "^8.0.0",
"posthog-js": "^1.68.4",
"posthog-js": "^1.75.3",
"posthog-node": "^3.1.1",
"prettier": "^3.0.0",
"prismjs": "^1.29.0",
"react": "18.2.0",
"react-diff-viewer": "^3.1.1",
"react-dom": "18.2.0",
"react-github-btn": "^1.4.0",
"react-icons": "^4.10.1",
"react-json-tree": "^0.18.0",
"react-select": "^5.7.4",
"react-syntax-highlighter": "^15.5.0",
"react-textarea-autosize": "^8.5.0",

pnpm-lock.yaml (generated)
File diff suppressed because it is too large.


@@ -0,0 +1,5 @@
-- CreateEnum
CREATE TYPE "UserRole" AS ENUM ('ADMIN', 'USER');
-- AlterTable
ALTER TABLE "User" ADD COLUMN "role" "UserRole" NOT NULL DEFAULT 'USER';


@@ -0,0 +1,28 @@
-- CreateTable
CREATE TABLE "Dataset" (
"id" UUID NOT NULL,
"name" TEXT NOT NULL,
"organizationId" UUID NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "Dataset_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "DatasetEntry" (
"id" UUID NOT NULL,
"input" TEXT NOT NULL,
"output" TEXT,
"datasetId" UUID NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "DatasetEntry_pkey" PRIMARY KEY ("id")
);
-- AddForeignKey
ALTER TABLE "Dataset" ADD CONSTRAINT "Dataset_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "DatasetEntry" ADD CONSTRAINT "DatasetEntry_datasetId_fkey" FOREIGN KEY ("datasetId") REFERENCES "Dataset"("id") ON DELETE CASCADE ON UPDATE CASCADE;


@@ -0,0 +1,13 @@
/*
Warnings:
- You are about to drop the column `constructFn` on the `PromptVariant` table. All the data in the column will be lost.
- You are about to drop the column `constructFnVersion` on the `PromptVariant` table. All the data in the column will be lost.
- Added the required column `promptConstructor` to the `PromptVariant` table without a default value. This is not possible if the table is not empty.
- Added the required column `promptConstructorVersion` to the `PromptVariant` table without a default value. This is not possible if the table is not empty.
*/
-- AlterTable
ALTER TABLE "PromptVariant" RENAME COLUMN "constructFn" TO "promptConstructor";
ALTER TABLE "PromptVariant" RENAME COLUMN "constructFnVersion" TO "promptConstructorVersion";


@@ -31,11 +31,11 @@ model Experiment {
model PromptVariant {
id String @id @default(uuid()) @db.Uuid
label String
constructFn String
constructFnVersion Int
model String
modelProvider String
label String
promptConstructor String
promptConstructorVersion Int
model String
modelProvider String
uiId String @default(uuid()) @db.Uuid
visible Boolean @default(true)
@@ -174,6 +174,32 @@ model OutputEvaluation {
@@unique([modelResponseId, evaluationId])
}
model Dataset {
id String @id @default(uuid()) @db.Uuid
name String
datasetEntries DatasetEntry[]
organizationId String @db.Uuid
organization Organization @relation(fields: [organizationId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
model DatasetEntry {
id String @id @default(uuid()) @db.Uuid
input String
output String?
datasetId String @db.Uuid
dataset Dataset? @relation(fields: [datasetId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
model Organization {
id String @id @default(uuid()) @db.Uuid
personalOrgUserId String? @unique @db.Uuid
@@ -183,6 +209,7 @@ model Organization {
updatedAt DateTime @updatedAt
organizationUsers OrganizationUser[]
experiments Experiment[]
datasets Dataset[]
}
enum OrganizationUserRole {
@@ -249,12 +276,20 @@ model Session {
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
}
enum UserRole {
ADMIN
USER
}
model User {
id String @id @default(uuid()) @db.Uuid
name String?
email String? @unique
emailVerified DateTime?
image String?
id String @id @default(uuid()) @db.Uuid
name String?
email String? @unique
emailVerified DateTime?
image String?
role UserRole @default(USER)
accounts Account[]
sessions Session[]
organizationUsers OrganizationUser[]


@@ -1,6 +1,7 @@
import { prisma } from "~/server/db";
import dedent from "dedent";
import { generateNewCell } from "~/server/utils/generateNewCell";
import { promptConstructorVersion } from "~/promptConstructor/version";
const defaultId = "11111111-1111-1111-1111-111111111111";
@@ -51,8 +52,8 @@ await prisma.promptVariant.createMany({
sortIndex: 0,
model: "gpt-3.5-turbo-0613",
modelProvider: "openai/ChatCompletion",
constructFnVersion: 1,
constructFn: dedent`
promptConstructorVersion,
promptConstructor: dedent`
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo-0613",
messages: [
@@ -70,8 +71,8 @@ await prisma.promptVariant.createMany({
sortIndex: 1,
model: "gpt-3.5-turbo-0613",
modelProvider: "openai/ChatCompletion",
constructFnVersion: 1,
constructFn: dedent`
promptConstructorVersion,
promptConstructor: dedent`
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo-0613",
messages: [


@@ -3,6 +3,7 @@ import { generateNewCell } from "~/server/utils/generateNewCell";
import dedent from "dedent";
import { execSync } from "child_process";
import fs from "fs";
import { promptConstructorVersion } from "~/promptConstructor/version";
const defaultId = "11111111-1111-1111-1111-111111111112";
@@ -98,8 +99,8 @@ for (const dataset of datasets) {
sortIndex: 0,
model: "gpt-3.5-turbo-0613",
modelProvider: "openai/ChatCompletion",
constructFnVersion: 1,
constructFn: dedent`
promptConstructorVersion,
promptConstructor: dedent`
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo-0613",
messages: [


@@ -2,6 +2,7 @@ import { prisma } from "~/server/db";
import dedent from "dedent";
import fs from "fs";
import { parse } from "csv-parse/sync";
import { promptConstructorVersion } from "~/promptConstructor/version";
const defaultId = "11111111-1111-1111-1111-111111111112";
@@ -85,8 +86,8 @@ await prisma.promptVariant.createMany({
sortIndex: 0,
model: "gpt-3.5-turbo-0613",
modelProvider: "openai/ChatCompletion",
constructFnVersion: 1,
constructFn: dedent`
promptConstructorVersion,
promptConstructor: dedent`
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo-0613",
messages: [


@@ -5,6 +5,9 @@ set -e
echo "Migrating the database"
pnpm prisma migrate deploy
echo "Migrating promptConstructors"
pnpm tsx src/promptConstructor/migrate.ts
echo "Starting the server"
pnpm concurrently --kill-others \

sentry.client.config.ts

@@ -0,0 +1,33 @@
// This file configures the initialization of Sentry on the client.
// The config you add here will be used whenever a user loads a page in their browser.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/
import * as Sentry from "@sentry/nextjs";
import { env } from "~/env.mjs";
if (env.NEXT_PUBLIC_SENTRY_DSN) {
Sentry.init({
dsn: env.NEXT_PUBLIC_SENTRY_DSN,
// Adjust this value in production, or use tracesSampler for greater control
tracesSampleRate: 1,
// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
replaysOnErrorSampleRate: 1.0,
// This sets the sample rate to be 10%. You may want this to be 100% while
// in development and sample at a lower rate in production
replaysSessionSampleRate: 0.1,
// You can remove this option if you're not planning to use the Sentry Session Replay feature:
integrations: [
new Sentry.Replay({
// Additional Replay configuration goes in here, for example:
maskAllText: true,
blockAllMedia: true,
}),
],
});
}

sentry.edge.config.ts

@@ -0,0 +1,19 @@
// This file configures the initialization of Sentry for edge features (middleware, edge routes, and so on).
// The config you add here will be used whenever one of the edge features is loaded.
// Note that this config is unrelated to the Vercel Edge Runtime and is also required when running locally.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/
import * as Sentry from "@sentry/nextjs";
import { env } from "~/env.mjs";
if (env.NEXT_PUBLIC_SENTRY_DSN) {
Sentry.init({
dsn: env.NEXT_PUBLIC_SENTRY_DSN,
// Adjust this value in production, or use tracesSampler for greater control
tracesSampleRate: 1,
// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
});
}

sentry.server.config.ts

@@ -0,0 +1,18 @@
// This file configures the initialization of Sentry on the server.
// The config you add here will be used whenever the server handles a request.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/
import * as Sentry from "@sentry/nextjs";
import { env } from "~/env.mjs";
if (env.NEXT_PUBLIC_SENTRY_DSN) {
Sentry.init({
dsn: env.NEXT_PUBLIC_SENTRY_DSN,
// Adjust this value in production, or use tracesSampler for greater control
tracesSampleRate: 1,
// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
});
}


@@ -68,7 +68,7 @@ export const ChangeModelModal = ({
return;
await replaceVariantMutation.mutateAsync({
id: variant.id,
constructFn: modifiedPromptFn,
promptConstructor: modifiedPromptFn,
streamScenarios: visibleScenarios,
});
await utils.promptVariants.list.invalidate();
@@ -107,7 +107,7 @@ export const ChangeModelModal = ({
<ModelSearch selectedModel={selectedModel} setSelectedModel={setSelectedModel} />
{isString(modifiedPromptFn) && (
<CompareFunctions
originalFunction={variant.constructFn}
originalFunction={variant.promptConstructor}
newFunction={modifiedPromptFn}
leftTitle={originalLabel}
rightTitle={convertedLabel}


@@ -22,8 +22,8 @@ export const ModelSearch = (props: {
const [containerRef, containerDimensions] = useElementDimensions();
return (
<VStack ref={containerRef as LegacyRef<HTMLDivElement>} w="full">
<Text>Browse Models</Text>
<VStack ref={containerRef as LegacyRef<HTMLDivElement>} w="full" fontFamily="inconsolata">
<Text fontWeight="bold">Browse Models</Text>
<Select<ProviderModel>
styles={{ control: (provided) => ({ ...provided, width: containerDimensions?.width }) }}
getOptionLabel={(data) => modelLabel(data.provider, data.model)}


@@ -23,16 +23,24 @@ export const ModelStatsCard = ({
{label}
</Text>
<VStack w="full" spacing={6} bgColor="gray.100" p={4} borderRadius={4}>
<VStack
w="full"
spacing={6}
borderWidth={1}
borderColor="gray.300"
p={4}
borderRadius={8}
fontFamily="inconsolata"
>
<HStack w="full" align="flex-start">
<Text flex={1} fontSize="lg">
<Text as="span" color="gray.600">
{model.provider} /{" "}
</Text>
<VStack flex={1} fontSize="lg" alignItems="flex-start">
<Text as="span" fontWeight="bold" color="gray.900">
{model.name}
</Text>
</Text>
<Text as="span" color="gray.600" fontSize="sm">
Provider: {model.provider}
</Text>
</VStack>
<Link
href={model.learnMoreUrl}
isExternal


@@ -1,18 +1,29 @@
import { Button, Spinner, InputGroup, InputRightElement, Icon, HStack } from "@chakra-ui/react";
import {
Button,
Spinner,
InputGroup,
InputRightElement,
Icon,
HStack,
type InputGroupProps,
} from "@chakra-ui/react";
import { IoMdSend } from "react-icons/io";
import AutoResizeTextArea from "../AutoResizeTextArea";
import AutoResizeTextArea from "./AutoResizeTextArea";
export const CustomInstructionsInput = ({
instructions,
setInstructions,
loading,
onSubmit,
placeholder = "Send custom instructions",
...props
}: {
instructions: string;
setInstructions: (instructions: string) => void;
loading: boolean;
onSubmit: () => void;
}) => {
placeholder?: string;
} & InputGroupProps) => {
return (
<InputGroup
size="md"
@@ -22,6 +33,7 @@ export const CustomInstructionsInput = ({
borderRadius={8}
alignItems="center"
colorScheme="orange"
{...props}
>
<AutoResizeTextArea
value={instructions}
@@ -33,7 +45,7 @@ export const CustomInstructionsInput = ({
onSubmit();
}
}}
placeholder="Send custom instructions"
placeholder={placeholder}
py={4}
pl={4}
pr={12}


@@ -1,19 +0,0 @@
import { type StackProps, VStack } from "@chakra-ui/react";
import { CellOptions } from "./CellOptions";
export const CellContent = ({
hardRefetch,
hardRefetching,
children,
...props
}: {
hardRefetch: () => void;
hardRefetching: boolean;
} & StackProps) => (
<VStack w="full" alignItems="flex-start" {...props}>
<CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} />
<VStack w="full" alignItems="flex-start" maxH={500} overflowY="auto">
{children}
</VStack>
</VStack>
);


@@ -1,37 +0,0 @@
import { Button, HStack, Icon, Spinner, Tooltip } from "@chakra-ui/react";
import { BsArrowClockwise } from "react-icons/bs";
import { useExperimentAccess } from "~/utils/hooks";
export const CellOptions = ({
refetchingOutput,
refetchOutput,
}: {
refetchingOutput: boolean;
refetchOutput: () => void;
}) => {
const { canModify } = useExperimentAccess();
return (
<HStack justifyContent="flex-end" w="full">
{canModify && (
<Tooltip label="Refetch output" aria-label="refetch output">
<Button
size="xs"
w={4}
h={4}
py={4}
px={4}
minW={0}
borderRadius={8}
color="gray.500"
variant="ghost"
cursor="pointer"
onClick={refetchOutput}
aria-label="refetch output"
>
<Icon as={refetchingOutput ? Spinner : BsArrowClockwise} boxSize={4} />
</Button>
</Tooltip>
)}
</HStack>
);
};


@@ -1,17 +1,17 @@
import { api } from "~/utils/api";
import { type PromptVariant, type Scenario } from "../types";
import { Text, VStack } from "@chakra-ui/react";
import { type StackProps, Text, VStack } from "@chakra-ui/react";
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
import SyntaxHighlighter from "react-syntax-highlighter";
import { docco } from "react-syntax-highlighter/dist/cjs/styles/hljs";
import stringify from "json-stringify-pretty-compact";
import { type ReactElement, useState, useEffect, Fragment } from "react";
import { type ReactElement, useState, useEffect, Fragment, useCallback } from "react";
import useSocket from "~/utils/useSocket";
import { OutputStats } from "./OutputStats";
import { RetryCountdown } from "./RetryCountdown";
import frontendModelProviders from "~/modelProviders/frontendModelProviders";
import { ResponseLog } from "./ResponseLog";
import { CellContent } from "./CellContent";
import { CellOptions } from "./TopActions";
const WAITING_MESSAGE_INTERVAL = 20000;
@@ -72,37 +72,49 @@ export default function OutputCell({
// TODO: disconnect from socket if we're not streaming anymore
const streamedMessage = useSocket<OutputSchema>(cell?.id);
const mostRecentResponse = cell?.modelResponses[cell.modelResponses.length - 1];
const CellWrapper = useCallback(
({ children, ...props }: StackProps) => (
<VStack w="full" alignItems="flex-start" {...props} px={2} py={2} h="100%">
{cell && (
<CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} cell={cell} />
)}
<VStack w="full" alignItems="flex-start" maxH={500} overflowY="auto" flex={1}>
{children}
</VStack>
{mostRecentResponse && (
<OutputStats modelResponse={mostRecentResponse} scenario={scenario} />
)}
</VStack>
),
[hardRefetching, hardRefetch, mostRecentResponse, scenario, cell],
);
if (!vars) return null;
if (!cell && !fetchingOutput)
return (
<CellContent hardRefetching={hardRefetching} hardRefetch={hardRefetch}>
<CellWrapper>
<Text color="gray.500">Error retrieving output</Text>
</CellContent>
</CellWrapper>
);
if (cell && cell.errorMessage) {
return (
<CellContent hardRefetching={hardRefetching} hardRefetch={hardRefetch}>
<CellWrapper>
<Text color="red.500">{cell.errorMessage}</Text>
</CellContent>
</CellWrapper>
);
}
if (disabledReason) return <Text color="gray.500">{disabledReason}</Text>;
const mostRecentResponse = cell?.modelResponses[cell.modelResponses.length - 1];
const showLogs = !streamedMessage && !mostRecentResponse?.output;
if (showLogs)
return (
<CellContent
hardRefetching={hardRefetching}
hardRefetch={hardRefetch}
alignItems="flex-start"
fontFamily="inconsolata, monospace"
spacing={0}
>
<CellWrapper alignItems="flex-start" fontFamily="inconsolata, monospace" spacing={0}>
{cell?.jobQueuedAt && <ResponseLog time={cell.jobQueuedAt} title="Job queued" />}
{cell?.jobStartedAt && <ResponseLog time={cell.jobStartedAt} title="Job started" />}
{cell?.modelResponses?.map((response) => {
@@ -124,9 +136,13 @@ export default function OutputCell({
Array.from({ length: numWaitingMessages }, (_, i) => (
<ResponseLog
key={`waiting-${i}`}
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
time={new Date(response.requestedAt!.getTime() + i * WAITING_MESSAGE_INTERVAL)}
title="Waiting for response"
time={
new Date(
(response.requestedAt?.getTime?.() ?? 0) +
(i + 1) * WAITING_MESSAGE_INTERVAL,
)
}
title="Waiting for response..."
/>
))}
{response.receivedAt && (
@@ -144,7 +160,7 @@ export default function OutputCell({
{mostRecentResponse?.retryTime && (
<RetryCountdown retryTime={mostRecentResponse.retryTime} />
)}
</CellContent>
</CellWrapper>
);
const normalizedOutput = mostRecentResponse?.output
@@ -155,50 +171,27 @@ export default function OutputCell({
if (mostRecentResponse?.output && normalizedOutput?.type === "json") {
return (
<VStack
w="100%"
h="100%"
fontSize="xs"
flexWrap="wrap"
overflowX="hidden"
justifyContent="space-between"
>
<CellContent
hardRefetching={hardRefetching}
hardRefetch={hardRefetch}
w="full"
flex={1}
spacing={0}
<CellWrapper>
<SyntaxHighlighter
customStyle={{ overflowX: "unset", width: "100%", flex: 1 }}
language="json"
style={docco}
lineProps={{
style: { wordBreak: "break-all", whiteSpace: "pre-wrap" },
}}
wrapLines
>
<SyntaxHighlighter
customStyle={{ overflowX: "unset", width: "100%", flex: 1 }}
language="json"
style={docco}
lineProps={{
style: { wordBreak: "break-all", whiteSpace: "pre-wrap" },
}}
wrapLines
>
{stringify(normalizedOutput.value, { maxLength: 40 })}
</SyntaxHighlighter>
</CellContent>
<OutputStats modelResponse={mostRecentResponse} scenario={scenario} />
</VStack>
{stringify(normalizedOutput.value, { maxLength: 40 })}
</SyntaxHighlighter>
</CellWrapper>
);
}
const contentToDisplay = (normalizedOutput?.type === "text" && normalizedOutput.value) || "";
return (
<VStack w="100%" h="100%" justifyContent="space-between" whiteSpace="pre-wrap">
<VStack w="full" alignItems="flex-start" spacing={0}>
<CellContent hardRefetching={hardRefetching} hardRefetch={hardRefetch}>
<Text>{contentToDisplay}</Text>
</CellContent>
</VStack>
{mostRecentResponse?.output && (
<OutputStats modelResponse={mostRecentResponse} scenario={scenario} />
)}
</VStack>
<CellWrapper>
<Text>{contentToDisplay}</Text>
</CellWrapper>
);
}


@@ -23,8 +23,15 @@ export const OutputStats = ({
const completionTokens = modelResponse.completionTokens;
return (
<HStack w="full" align="center" color="gray.500" fontSize="2xs" mt={{ base: 0, md: 1 }}>
<HStack flex={1}>
<HStack
w="full"
align="center"
color="gray.500"
fontSize="2xs"
mt={{ base: 0, md: 1 }}
alignItems="flex-end"
>
<HStack flex={1} flexWrap="wrap">
{modelResponse.outputEvaluations.map((evaluation) => {
const passed = evaluation.result > 0.5;
return (


@@ -0,0 +1,36 @@
import {
Modal,
ModalBody,
ModalCloseButton,
ModalContent,
ModalHeader,
ModalOverlay,
type UseDisclosureReturn,
} from "@chakra-ui/react";
import { type RouterOutputs } from "~/utils/api";
import { JSONTree } from "react-json-tree";
export default function ExpandedModal(props: {
cell: NonNullable<RouterOutputs["scenarioVariantCells"]["get"]>;
disclosure: UseDisclosureReturn;
}) {
return (
<Modal isOpen={props.disclosure.isOpen} onClose={props.disclosure.onClose} size="2xl">
<ModalOverlay />
<ModalContent>
<ModalHeader>Prompt</ModalHeader>
<ModalCloseButton />
<ModalBody>
<JSONTree
data={props.cell.prompt}
invertTheme={true}
theme="chalk"
shouldExpandNodeInitially={() => true}
getItemString={() => ""}
hideRoot
/>
</ModalBody>
</ModalContent>
</Modal>
);
}


@@ -0,0 +1,53 @@
import { HStack, Icon, IconButton, Spinner, Tooltip, useDisclosure } from "@chakra-ui/react";
import { BsArrowClockwise, BsInfoCircle } from "react-icons/bs";
import { useExperimentAccess } from "~/utils/hooks";
import ExpandedModal from "./PromptModal";
import { type RouterOutputs } from "~/utils/api";
export const CellOptions = ({
cell,
refetchingOutput,
refetchOutput,
}: {
cell: RouterOutputs["scenarioVariantCells"]["get"];
refetchingOutput: boolean;
refetchOutput: () => void;
}) => {
const { canModify } = useExperimentAccess();
const modalDisclosure = useDisclosure();
return (
<HStack justifyContent="flex-end" w="full">
{cell && (
<>
<Tooltip label="See Prompt">
<IconButton
aria-label="See Prompt"
icon={<Icon as={BsInfoCircle} boxSize={4} />}
onClick={modalDisclosure.onOpen}
size="xs"
colorScheme="gray"
color="gray.500"
variant="ghost"
/>
</Tooltip>
<ExpandedModal cell={cell} disclosure={modalDisclosure} />
</>
)}
{canModify && (
<Tooltip label="Refetch output">
<IconButton
size="xs"
color="gray.500"
variant="ghost"
cursor="pointer"
onClick={refetchOutput}
aria-label="refetch output"
icon={<Icon as={refetchingOutput ? Spinner : BsArrowClockwise} boxSize={4} />}
/>
</Tooltip>
)}
</HStack>
);
};


@@ -1,9 +1,8 @@
import { useEffect, type DragEvent } from "react";
import { api } from "~/utils/api";
import { isEqual } from "lodash-es";
import { type Scenario } from "./types";
import { useEffect, useState, type DragEvent } from "react";
import { api } from "~/utils/api";
import { useExperiment, useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
import { useState } from "react";
import { type Scenario } from "./types";
import {
Box,
@@ -12,14 +11,12 @@ import {
Icon,
IconButton,
Spinner,
Stack,
Text,
Tooltip,
VStack,
Text,
} from "@chakra-ui/react";
import { cellPadding } from "../constants";
import { BsArrowsAngleExpand, BsX } from "react-icons/bs";
import { RiDraggable } from "react-icons/ri";
import { cellPadding } from "../constants";
import { FloatingLabelInput } from "./FloatingLabelInput";
import { ScenarioEditorModal } from "./ScenarioEditorModal";
@@ -115,60 +112,44 @@ export default function ScenarioEditor({
onDrop={onReorder}
backgroundColor={isDragTarget ? "gray.100" : "transparent"}
>
{canModify && props.canHide && (
<Stack
alignSelf="flex-start"
opacity={props.hovered ? 1 : 0}
spacing={0}
ml={-cellPadding.x}
>
<Tooltip label="Hide scenario" hasArrow>
{/* for some reason the tooltip can't position itself properly relative to the icon without the wrapping box */}
<Button
variant="unstyled"
color="gray.400"
height="unset"
width="unset"
minW="unset"
onClick={onHide}
_hover={{
color: "gray.800",
cursor: "pointer",
}}
>
<Icon as={hidingInProgress ? Spinner : BsX} boxSize={hidingInProgress ? 4 : 6} />
</Button>
</Tooltip>
<Icon
as={RiDraggable}
boxSize={6}
color="gray.400"
_hover={{ color: "gray.800", cursor: "pointer" }}
/>
</Stack>
)}
{variableLabels.length === 0 ? (
<Box color="gray.500">
{vars.data ? "No scenario variables configured" : "Loading..."}
</Box>
) : (
<VStack spacing={4} flex={1} py={2}>
<HStack justifyContent="space-between" w="100%">
<Text color="gray.500">Scenario</Text>
<IconButton
className="fullscreen-toggle"
aria-label="Maximize"
icon={<BsArrowsAngleExpand />}
onClick={() => setScenarioEditorModalOpen(true)}
boxSize={6}
borderRadius={4}
p={1.5}
minW={0}
colorScheme="gray"
color="gray.500"
variant="ghost"
/>
<HStack justifyContent="space-between" w="100%" align="center" spacing={0}>
<Text flex={1}>Scenario</Text>
<Tooltip label="Expand" hasArrow>
<IconButton
aria-label="Expand"
icon={<Icon as={BsArrowsAngleExpand} boxSize={3} />}
onClick={() => setScenarioEditorModalOpen(true)}
size="xs"
colorScheme="gray"
color="gray.500"
variant="ghost"
/>
</Tooltip>
{canModify && props.canHide && (
<Tooltip label="Delete" hasArrow>
<IconButton
aria-label="Delete"
icon={
<Icon
as={hidingInProgress ? Spinner : BsX}
boxSize={hidingInProgress ? 4 : 6}
/>
}
onClick={onHide}
size="xs"
display="flex"
colorScheme="gray"
color="gray.500"
variant="ghost"
/>
</Tooltip>
)}
</HStack>
{variableLabels.map((key) => {
const value = values[key] ?? "";


@@ -1,7 +1,6 @@
import {
Button,
HStack,
Icon,
Modal,
ModalBody,
ModalCloseButton,
@@ -14,7 +13,6 @@ import {
VStack,
} from "@chakra-ui/react";
import { useEffect, useState } from "react";
import { BsFileTextFill } from "react-icons/bs";
import { isEqual } from "lodash-es";
import { api } from "~/utils/api";
@@ -60,8 +58,6 @@ export const ScenarioEditorModal = ({
await utils.scenarios.list.invalidate();
}, [mutation, values]);
console.log("scenario", scenario);
const vars = api.templateVars.list.useQuery({ experimentId: experiment.data?.id ?? "" });
const variableLabels = vars.data?.map((v) => v.label) ?? [];
@@ -73,12 +69,7 @@ export const ScenarioEditorModal = ({
>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader>
<HStack>
<Icon as={BsFileTextFill} />
<Text>Scenario</Text>
</HStack>
</ModalHeader>
<ModalHeader />
<ModalCloseButton />
<ModalBody maxW="unset">
<VStack spacing={8}>


@@ -1,73 +1,20 @@
import { Box, HStack, IconButton } from "@chakra-ui/react";
import {
BsChevronDoubleLeft,
BsChevronDoubleRight,
BsChevronLeft,
BsChevronRight,
} from "react-icons/bs";
import { usePage, useScenarios } from "~/utils/hooks";
import { useScenarios } from "~/utils/hooks";
import Paginator from "../Paginator";
const ScenarioPaginator = () => {
const [page, setPage] = usePage();
const { data } = useScenarios();
if (!data) return null;
const { scenarios, startIndex, lastPage, count } = data;
const nextPage = () => {
if (page < lastPage) {
setPage(page + 1, "replace");
}
};
const prevPage = () => {
if (page > 1) {
setPage(page - 1, "replace");
}
};
const goToLastPage = () => setPage(lastPage, "replace");
const goToFirstPage = () => setPage(1, "replace");
return (
<HStack pt={4}>
<IconButton
variant="ghost"
size="sm"
onClick={goToFirstPage}
isDisabled={page === 1}
aria-label="Go to first page"
icon={<BsChevronDoubleLeft />}
/>
<IconButton
variant="ghost"
size="sm"
onClick={prevPage}
isDisabled={page === 1}
aria-label="Previous page"
icon={<BsChevronLeft />}
/>
<Box>
{startIndex}-{startIndex + scenarios.length - 1} / {count}
</Box>
<IconButton
variant="ghost"
size="sm"
onClick={nextPage}
isDisabled={page === lastPage}
aria-label="Next page"
icon={<BsChevronRight />}
/>
<IconButton
variant="ghost"
size="sm"
onClick={goToLastPage}
isDisabled={page === lastPage}
aria-label="Go to last page"
icon={<BsChevronDoubleRight />}
/>
</HStack>
<Paginator
numItemsLoaded={scenarios.length}
startIndex={startIndex}
lastPage={lastPage}
count={count}
/>
);
};


@@ -1,6 +1,5 @@
import { Box, GridItem } from "@chakra-ui/react";
import { GridItem } from "@chakra-ui/react";
import React, { useState } from "react";
import { cellPadding } from "../constants";
import OutputCell from "./OutputCell/OutputCell";
import ScenarioEditor from "./ScenarioEditor";
import type { PromptVariant, Scenario } from "./types";
@@ -39,9 +38,7 @@ const ScenarioRow = (props: {
colStart={i + 2}
{...borders}
>
<Box h="100%" w="100%" px={cellPadding.x} py={cellPadding.y}>
<OutputCell key={variant.id} scenario={props.scenario} variant={variant} />
</Box>
<OutputCell key={variant.id} scenario={props.scenario} variant={variant} />
</GridItem>
))}
</>


@@ -47,7 +47,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
return () => window.removeEventListener("keydown", handleEsc);
}, [isFullscreen, toggleFullscreen]);
const lastSavedFn = props.variant.constructFn;
const lastSavedFn = props.variant.promptConstructor;
const modifierKey = useModifierKeyLabel();
@@ -96,7 +96,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
const resp = await replaceVariant.mutateAsync({
id: props.variant.id,
constructFn: currentFn,
promptConstructor: currentFn,
streamScenarios: visibleScenarios,
});
if (resp.status === "error") {


@@ -43,12 +43,12 @@ export default function VariantStats(props: { variant: PromptVariant }) {
return (
<HStack
justifyContent="space-between"
alignItems="center"
alignItems="flex-end"
mx="2"
fontSize="xs"
py={cellPadding.y}
>
<HStack px={cellPadding.x}>
<HStack px={cellPadding.x} flexWrap="wrap">
{showNumFinished && (
<Text>
{data.outputCount} / {data.scenarioCount}


@@ -35,7 +35,7 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
pb={24}
pl={8}
display="grid"
gridTemplateColumns={`250px repeat(${variants.data.length}, minmax(300px, 1fr)) auto`}
gridTemplateColumns={`250px repeat(${variants.data.length}, minmax(320px, 1fr)) auto`}
sx={{
"> *": {
borderColor: "gray.300",


@@ -0,0 +1,79 @@
import { Box, HStack, IconButton } from "@chakra-ui/react";
import {
BsChevronDoubleLeft,
BsChevronDoubleRight,
BsChevronLeft,
BsChevronRight,
} from "react-icons/bs";
import { usePage } from "~/utils/hooks";
const Paginator = ({
numItemsLoaded,
startIndex,
lastPage,
count,
}: {
numItemsLoaded: number;
startIndex: number;
lastPage: number;
count: number;
}) => {
const [page, setPage] = usePage();
const nextPage = () => {
if (page < lastPage) {
setPage(page + 1, "replace");
}
};
const prevPage = () => {
if (page > 1) {
setPage(page - 1, "replace");
}
};
const goToLastPage = () => setPage(lastPage, "replace");
const goToFirstPage = () => setPage(1, "replace");
return (
<HStack pt={4}>
<IconButton
variant="ghost"
size="sm"
onClick={goToFirstPage}
isDisabled={page === 1}
aria-label="Go to first page"
icon={<BsChevronDoubleLeft />}
/>
<IconButton
variant="ghost"
size="sm"
onClick={prevPage}
isDisabled={page === 1}
aria-label="Previous page"
icon={<BsChevronLeft />}
/>
<Box>
{startIndex}-{startIndex + numItemsLoaded - 1} / {count}
</Box>
<IconButton
variant="ghost"
size="sm"
onClick={nextPage}
isDisabled={page === lastPage}
aria-label="Next page"
icon={<BsChevronRight />}
/>
<IconButton
variant="ghost"
size="sm"
onClick={goToLastPage}
isDisabled={page === lastPage}
aria-label="Go to last page"
icon={<BsChevronDoubleRight />}
/>
</HStack>
);
};
export default Paginator;


@@ -20,7 +20,7 @@ import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
import { type PromptVariant } from "@prisma/client";
import { useState } from "react";
import CompareFunctions from "./CompareFunctions";
import { CustomInstructionsInput } from "./CustomInstructionsInput";
import { CustomInstructionsInput } from "../CustomInstructionsInput";
import { RefineAction } from "./RefineAction";
import { isObject, isString } from "lodash-es";
import { type RefinementAction, type SupportedProvider } from "~/modelProviders/types";
@@ -73,7 +73,7 @@ export const RefinePromptModal = ({
return;
await replaceVariantMutation.mutateAsync({
id: variant.id,
constructFn: refinedPromptFn,
promptConstructor: refinedPromptFn,
streamScenarios: visibleScenarios,
});
await utils.promptVariants.list.invalidate();
@@ -97,7 +97,7 @@ export const RefinePromptModal = ({
<ModalCloseButton />
<ModalBody maxW="unset">
<VStack spacing={8}>
<VStack spacing={4}>
<VStack spacing={4} w="full">
{Object.keys(refinementActions).length && (
<>
<SimpleGrid columns={{ base: 1, md: 2 }} spacing={8}>
@@ -122,11 +122,11 @@ export const RefinePromptModal = ({
instructions={instructions}
setInstructions={setInstructions}
loading={modificationInProgress}
onSubmit={getModifiedPromptFn}
onSubmit={() => getModifiedPromptFn()}
/>
</VStack>
<CompareFunctions
originalFunction={variant.constructFn}
originalFunction={variant.promptConstructor}
newFunction={isString(refinedPromptFn) ? refinedPromptFn : undefined}
maxH="40vh"
/>


@@ -0,0 +1,110 @@
import {
HStack,
Icon,
VStack,
Text,
Divider,
Spinner,
AspectRatio,
SkeletonText,
} from "@chakra-ui/react";
import { RiDatabase2Line } from "react-icons/ri";
import { formatTimePast } from "~/utils/dayjs";
import Link from "next/link";
import { useRouter } from "next/router";
import { BsPlusSquare } from "react-icons/bs";
import { api } from "~/utils/api";
import { useHandledAsyncCallback } from "~/utils/hooks";
type DatasetData = {
name: string;
numEntries: number;
id: string;
createdAt: Date;
updatedAt: Date;
};
export const DatasetCard = ({ dataset }: { dataset: DatasetData }) => {
return (
<AspectRatio ratio={1.2} w="full">
<VStack
as={Link}
href={{ pathname: "/data/[id]", query: { id: dataset.id } }}
bg="gray.50"
_hover={{ bg: "gray.100" }}
transition="background 0.2s"
cursor="pointer"
borderColor="gray.200"
borderWidth={1}
p={4}
justify="space-between"
>
<HStack w="full" color="gray.700" justify="center">
<Icon as={RiDatabase2Line} boxSize={4} />
<Text fontWeight="bold">{dataset.name}</Text>
</HStack>
<HStack h="full" spacing={4} flex={1} align="center">
<CountLabel label="Rows" count={dataset.numEntries} />
</HStack>
<HStack w="full" color="gray.500" fontSize="xs" textAlign="center">
<Text flex={1}>Created {formatTimePast(dataset.createdAt)}</Text>
<Divider h={4} orientation="vertical" />
<Text flex={1}>Updated {formatTimePast(dataset.updatedAt)}</Text>
</HStack>
</VStack>
</AspectRatio>
);
};
const CountLabel = ({ label, count }: { label: string; count: number }) => {
return (
<VStack alignItems="center" flex={1}>
<Text color="gray.500" fontWeight="bold">
{label}
</Text>
<Text fontSize="sm" color="gray.500">
{count}
</Text>
</VStack>
);
};
export const NewDatasetCard = () => {
const router = useRouter();
const createMutation = api.datasets.create.useMutation();
const [createDataset, isLoading] = useHandledAsyncCallback(async () => {
const newDataset = await createMutation.mutateAsync({ label: "New Dataset" });
await router.push({ pathname: "/data/[id]", query: { id: newDataset.id } });
}, [createMutation, router]);
return (
<AspectRatio ratio={1.2} w="full">
<VStack
align="center"
justify="center"
_hover={{ cursor: "pointer", bg: "gray.50" }}
transition="background 0.2s"
cursor="pointer"
borderColor="gray.200"
borderWidth={1}
p={4}
onClick={createDataset}
>
<Icon as={isLoading ? Spinner : BsPlusSquare} boxSize={8} />
<Text display={{ base: "none", md: "block" }} ml={2}>
New Dataset
</Text>
</VStack>
</AspectRatio>
);
};
export const DatasetCardSkeleton = () => (
<AspectRatio ratio={1.2} w="full">
<VStack align="center" borderColor="gray.200" borderWidth={1} p={4} bg="gray.50">
<SkeletonText noOfLines={1} w="80%" />
<SkeletonText noOfLines={2} w="60%" />
<SkeletonText noOfLines={1} w="80%" />
</VStack>
</AspectRatio>
);


@@ -0,0 +1,21 @@
import { useDatasetEntries } from "~/utils/hooks";
import Paginator from "../Paginator";
const DatasetEntriesPaginator = () => {
const { data } = useDatasetEntries();
if (!data) return null;
const { entries, startIndex, lastPage, count } = data;
return (
<Paginator
numItemsLoaded={entries.length}
startIndex={startIndex}
lastPage={lastPage}
count={count}
/>
);
};
export default DatasetEntriesPaginator;


@@ -0,0 +1,31 @@
import { type StackProps, VStack, Table, Th, Tr, Thead, Tbody, Text } from "@chakra-ui/react";
import { useDatasetEntries } from "~/utils/hooks";
import TableRow from "./TableRow";
import DatasetEntriesPaginator from "./DatasetEntriesPaginator";
const DatasetEntriesTable = (props: StackProps) => {
const { data } = useDatasetEntries();
return (
<VStack justifyContent="space-between" {...props}>
<Table variant="simple" sx={{ "table-layout": "fixed", width: "full" }}>
<Thead>
<Tr>
<Th>Input</Th>
<Th>Output</Th>
</Tr>
</Thead>
<Tbody>{data?.entries.map((entry) => <TableRow key={entry.id} entry={entry} />)}</Tbody>
</Table>
{!data || data.entries.length === 0 ? (
<Text alignSelf="flex-start" pl={6} color="gray.500">
No entries found
</Text>
) : (
<DatasetEntriesPaginator />
)}
</VStack>
);
};
export default DatasetEntriesTable;


@@ -0,0 +1,26 @@
import { Button, HStack, useDisclosure } from "@chakra-ui/react";
import { BiImport } from "react-icons/bi";
import { BsStars } from "react-icons/bs";
import { GenerateDataModal } from "./GenerateDataModal";
export const DatasetHeaderButtons = () => {
const generateModalDisclosure = useDisclosure();
return (
<>
<HStack>
<Button leftIcon={<BiImport />} colorScheme="blue" variant="ghost">
Import Data
</Button>
<Button leftIcon={<BsStars />} colorScheme="blue" onClick={generateModalDisclosure.onOpen}>
Generate Data
</Button>
</HStack>
<GenerateDataModal
isOpen={generateModalDisclosure.isOpen}
onClose={generateModalDisclosure.onClose}
/>
</>
);
};


@@ -0,0 +1,128 @@
import {
Modal,
ModalBody,
ModalCloseButton,
ModalContent,
ModalHeader,
ModalOverlay,
ModalFooter,
Text,
HStack,
VStack,
Icon,
NumberInput,
NumberInputField,
NumberInputStepper,
NumberIncrementStepper,
NumberDecrementStepper,
Button,
} from "@chakra-ui/react";
import { BsStars } from "react-icons/bs";
import { useState } from "react";
import { useDataset, useHandledAsyncCallback } from "~/utils/hooks";
import { api } from "~/utils/api";
import AutoResizeTextArea from "~/components/AutoResizeTextArea";
export const GenerateDataModal = ({
isOpen,
onClose,
}: {
isOpen: boolean;
onClose: () => void;
}) => {
const utils = api.useContext();
const datasetId = useDataset().data?.id;
const [numToGenerate, setNumToGenerate] = useState<number>(20);
const [inputDescription, setInputDescription] = useState<string>(
"Each input should contain an email body. Half of the emails should contain event details, and the other half should not.",
);
const [outputDescription, setOutputDescription] = useState<string>(
`Each output should contain "true" or "false", where "true" indicates that the email contains event details.`,
);
const generateEntriesMutation = api.datasetEntries.autogenerateEntries.useMutation();
const [generateEntries, generateEntriesInProgress] = useHandledAsyncCallback(async () => {
if (!inputDescription || !outputDescription || !numToGenerate || !datasetId) return;
await generateEntriesMutation.mutateAsync({
datasetId,
inputDescription,
outputDescription,
numToGenerate,
});
await utils.datasetEntries.list.invalidate();
onClose();
}, [
generateEntriesMutation,
onClose,
inputDescription,
outputDescription,
numToGenerate,
datasetId,
]);
return (
<Modal isOpen={isOpen} onClose={onClose} size={{ base: "xl", sm: "2xl", md: "3xl" }}>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader>
<HStack>
<Icon as={BsStars} />
<Text>Generate Data</Text>
</HStack>
</ModalHeader>
<ModalCloseButton />
<ModalBody maxW="unset">
<VStack w="full" spacing={8} padding={8} alignItems="flex-start">
<VStack alignItems="flex-start" spacing={2}>
<Text fontWeight="bold">Number of Rows:</Text>
<NumberInput
step={5}
defaultValue={15}
min={0}
max={100}
onChange={(valueString) => setNumToGenerate(parseInt(valueString) || 0)}
value={numToGenerate}
w="24"
>
<NumberInputField />
<NumberInputStepper>
<NumberIncrementStepper />
<NumberDecrementStepper />
</NumberInputStepper>
</NumberInput>
</VStack>
<VStack alignItems="flex-start" w="full" spacing={2}>
<Text fontWeight="bold">Input Description:</Text>
<AutoResizeTextArea
value={inputDescription}
onChange={(e) => setInputDescription(e.target.value)}
placeholder="Each input should contain..."
/>
</VStack>
<VStack alignItems="flex-start" w="full" spacing={2}>
<Text fontWeight="bold">Output Description (optional):</Text>
<AutoResizeTextArea
value={outputDescription}
onChange={(e) => setOutputDescription(e.target.value)}
placeholder="The output should contain..."
/>
</VStack>
</VStack>
</ModalBody>
<ModalFooter>
<Button
colorScheme="blue"
isLoading={generateEntriesInProgress}
isDisabled={!numToGenerate || !inputDescription || !outputDescription}
onClick={generateEntries}
>
Generate
</Button>
</ModalFooter>
</ModalContent>
</Modal>
);
};


@@ -0,0 +1,13 @@
import { Td, Tr } from "@chakra-ui/react";
import { type DatasetEntry } from "@prisma/client";
const TableRow = ({ entry }: { entry: DatasetEntry }) => {
return (
<Tr key={entry.id}>
<Td>{entry.input}</Td>
<Td>{entry.output}</Td>
</Tr>
);
};
export default TableRow;


@@ -5,7 +5,7 @@ import { BsGearFill } from "react-icons/bs";
import { TbGitFork } from "react-icons/tb";
import { useAppStore } from "~/state/store";
export const HeaderButtons = () => {
export const ExperimentHeaderButtons = () => {
const experiment = useExperiment();
const canModify = experiment.data?.access.canModify ?? false;


@@ -8,42 +8,43 @@ import {
Text,
Box,
type BoxProps,
type LinkProps,
Link,
Link as ChakraLink,
Flex,
} from "@chakra-ui/react";
import Head from "next/head";
import Link, { type LinkProps } from "next/link";
import { BsGithub, BsPersonCircle } from "react-icons/bs";
import { useRouter } from "next/router";
import { type IconType } from "react-icons";
import { RiFlaskLine } from "react-icons/ri";
import { RiDatabase2Line, RiFlaskLine } from "react-icons/ri";
import { signIn, useSession } from "next-auth/react";
import UserMenu from "./UserMenu";
import { env } from "~/env.mjs";
type IconLinkProps = BoxProps & LinkProps & { label?: string; icon: IconType };
type IconLinkProps = BoxProps & LinkProps & { label?: string; icon: IconType; href: string };
const IconLink = ({ icon, label, href, target, color, ...props }: IconLinkProps) => {
const IconLink = ({ icon, label, href, color, ...props }: IconLinkProps) => {
const router = useRouter();
const isActive = href && router.pathname.startsWith(href);
return (
<HStack
w="full"
p={4}
color={color}
as={Link}
href={href}
target={target}
bgColor={isActive ? "gray.200" : "transparent"}
_hover={{ bgColor: "gray.200", textDecoration: "none" }}
justifyContent="start"
cursor="pointer"
{...props}
>
<Icon as={icon} boxSize={6} mr={2} />
<Text fontWeight="bold" fontSize="sm">
{label}
</Text>
</HStack>
<Link href={href} style={{ width: "100%" }}>
<HStack
w="full"
p={4}
color={color}
as={ChakraLink}
bgColor={isActive ? "gray.200" : "transparent"}
_hover={{ bgColor: "gray.300", textDecoration: "none" }}
justifyContent="start"
cursor="pointer"
{...props}
>
<Icon as={icon} boxSize={6} mr={2} />
<Text fontWeight="bold" fontSize="sm">
{label}
</Text>
</HStack>
</Link>
);
};
@@ -72,16 +73,28 @@ const NavSidebar = () => {
{user != null && (
<>
<IconLink icon={RiFlaskLine} label="Experiments" href="/experiments" />
{env.NEXT_PUBLIC_SHOW_DATA && (
<IconLink icon={RiDatabase2Line} label="Data" href="/data" />
)}
</>
)}
{user === null && (
<IconLink
icon={BsPersonCircle}
label="Sign In"
<HStack
w="full"
p={4}
as={ChakraLink}
_hover={{ bgColor: "gray.300", textDecoration: "none" }}
justifyContent="start"
cursor="pointer"
onClick={() => {
signIn("github").catch(console.error);
}}
/>
>
<Icon as={BsPersonCircle} boxSize={6} mr={2} />
<Text fontWeight="bold" fontSize="sm">
Sign In
</Text>
</HStack>
)}
</VStack>
{user ? (
@@ -90,7 +103,7 @@ const NavSidebar = () => {
<Divider />
)}
<VStack spacing={0} align="center">
<Link
<ChakraLink
href="https://github.com/openpipe/openpipe"
target="_blank"
color="gray.500"
@@ -98,7 +111,7 @@ const NavSidebar = () => {
p={2}
>
<Icon as={BsGithub} boxSize={6} />
</Link>
</ChakraLink>
</VStack>
</VStack>
);


@@ -19,6 +19,7 @@ export const env = createEnv({
OPENAI_API_KEY: z.string().min(1),
REPLICATE_API_TOKEN: z.string().default("placeholder"),
ANTHROPIC_API_KEY: z.string().default("placeholder"),
SENTRY_AUTH_TOKEN: z.string().optional(),
},
/**
@@ -29,6 +30,9 @@ export const env = createEnv({
client: {
NEXT_PUBLIC_POSTHOG_KEY: z.string().optional(),
NEXT_PUBLIC_SOCKET_URL: z.string().url().default("http://localhost:3318"),
NEXT_PUBLIC_HOST: z.string().url().default("http://localhost:3000"),
NEXT_PUBLIC_SENTRY_DSN: z.string().optional(),
NEXT_PUBLIC_SHOW_DATA: z.string().optional(),
},
/**
@@ -42,10 +46,14 @@ export const env = createEnv({
RESTRICT_PRISMA_LOGS: process.env.RESTRICT_PRISMA_LOGS,
NEXT_PUBLIC_POSTHOG_KEY: process.env.NEXT_PUBLIC_POSTHOG_KEY,
NEXT_PUBLIC_SOCKET_URL: process.env.NEXT_PUBLIC_SOCKET_URL,
NEXT_PUBLIC_HOST: process.env.NEXT_PUBLIC_HOST,
NEXT_PUBLIC_SHOW_DATA: process.env.NEXT_PUBLIC_SHOW_DATA,
GITHUB_CLIENT_ID: process.env.GITHUB_CLIENT_ID,
GITHUB_CLIENT_SECRET: process.env.GITHUB_CLIENT_SECRET,
REPLICATE_API_TOKEN: process.env.REPLICATE_API_TOKEN,
ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY,
NEXT_PUBLIC_SENTRY_DSN: process.env.NEXT_PUBLIC_SENTRY_DSN,
SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
},
/**
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation.
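For reference, a reduced sketch of the createEnv pattern used above (assuming the @t3-oss/env-nextjs helper that createEnv typically comes from), showing only the new Sentry variables. Every variable must be declared twice — once in the server or client schema and once in runtimeEnv — because Next.js inlines NEXT_PUBLIC_* values at build time:

import { createEnv } from "@t3-oss/env-nextjs";
import { z } from "zod";

export const env = createEnv({
  server: {
    // Build-time token used to upload sourcemaps to Sentry.
    SENTRY_AUTH_TOKEN: z.string().optional(),
  },
  client: {
    // Browser-side DSN for error reporting; optional so Sentry can be disabled.
    NEXT_PUBLIC_SENTRY_DSN: z.string().optional(),
  },
  // Each variable must also be mapped here explicitly; omitting an entry is
  // the usual cause of "env var is undefined at runtime" surprises.
  runtimeEnv: {
    SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
    NEXT_PUBLIC_SENTRY_DSN: process.env.NEXT_PUBLIC_SENTRY_DSN,
  },
});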

View File

@@ -0,0 +1,63 @@
{
"type": "object",
"properties": {
"model": {
"description": "The model that will complete your prompt.",
"x-oaiTypeLabel": "string",
"type": "string",
"enum": [
"claude-2",
"claude-2.0",
"claude-instant-1",
"claude-instant-1.1"
]
},
"prompt": {
"description": "The prompt that you want Claude to complete.\n\nFor proper response generation you will need to format your prompt as follows:\n\"\\n\\nHuman: all instructions for the assistant\\n\\nAssistant:\". The prompt string should begin with the characters \"Human:\" and end with \"Assistant:\".",
"default": "<|endoftext|>",
"example": "\\n\\nHuman: What is the correct translation of ${scenario.input}? I would like a long analysis followed by a short answer.\\n\\nAssistant:",
"type": "string"
},
"max_tokens_to_sample": {
"type": "integer",
"minimum": 1,
"default": 256,
"nullable": true,
"description": "The maximum number of tokens to generate before stopping."
},
"temperature": {
"type": "number",
"minimum": 0,
"maximum": 1,
"nullable": true,
"description": "Amount of randomness injected into the response.\n\nDefaults to 1."
},
"top_p": {
"type": "number",
"minimum": 0,
"maximum": 1,
"nullable": true,
"description": "Use nucleus sampling.\n\nYou should either alter temperature or top_p, but not both.\n"
},
"top_k": {
"type": "number",
"minimum": 0,
"default": 5,
"nullable": true,
"description": "Only sample from the top K options for each subsequent token."
},
"stream": {
"description": "Whether to incrementally stream the response using server-sent events.",
"type": "boolean",
"nullable": true,
"default": false
},
"stop_sequences": {
"description": "Sequences that will cause the model to stop generating completion text.\nBy default, our models stop on \"\\n\\nHuman:\".",
"default": null,
"nullable": true,
"type": "array"
}
},
"required": ["model", "prompt", "max_tokens_to_sample"]
}
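For reference, a minimal sketch of a request that satisfies this schema, hitting Anthropic's legacy completion endpoint directly (the URL, headers, and version string are assumptions based on Anthropic's public docs, not taken from this repo):

// Hypothetical direct call to Anthropic's legacy /v1/complete endpoint.
const response = await fetch("https://api.anthropic.com/v1/complete", {
  method: "POST",
  headers: {
    "content-type": "application/json",
    "x-api-key": process.env.ANTHROPIC_API_KEY ?? "",
    "anthropic-version": "2023-06-01",
  },
  body: JSON.stringify({
    model: "claude-2.0",
    // The prompt must start with "\n\nHuman:" and end with "\n\nAssistant:".
    prompt: "\n\nHuman: What is the capital of China?\n\nAssistant:",
    max_tokens_to_sample: 256, // required, per the schema above
    temperature: 0.7,
  }),
});
const { completion } = (await response.json()) as { completion: string };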

View File

@@ -13,8 +13,9 @@ const frontendModelProvider: FrontendModelProvider<SupportedModel, Completion> =
promptTokenPrice: 11.02 / 1000000,
completionTokenPrice: 32.68 / 1000000,
speed: "medium",
provider: "anthropic",
provider: "anthropic/completion",
learnMoreUrl: "https://www.anthropic.com/product",
apiDocsUrl: "https://docs.anthropic.com/claude/reference/complete_post",
},
"claude-instant-1.1": {
name: "Claude Instant 1.1",
@@ -22,8 +23,9 @@ const frontendModelProvider: FrontendModelProvider<SupportedModel, Completion> =
promptTokenPrice: 1.63 / 1000000,
completionTokenPrice: 5.51 / 1000000,
speed: "fast",
provider: "anthropic",
provider: "anthropic/completion",
learnMoreUrl: "https://www.anthropic.com/product",
apiDocsUrl: "https://docs.anthropic.com/claude/reference/complete_post",
},
},
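The prices above are dollars per token (list price per million tokens divided by 1,000,000). A quick worked example of the implied cost math, not code from the repo:

// Cost estimate for one claude-2 call using the prices above.
const promptTokenPrice = 11.02 / 1_000_000;     // $11.02 per 1M prompt tokens
const completionTokenPrice = 32.68 / 1_000_000; // $32.68 per 1M completion tokens

const cost = 1_500 * promptTokenPrice + 400 * completionTokenPrice;
// 1500 * 0.00001102 + 400 * 0.00003268 = 0.01653 + 0.013072 ≈ $0.0296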

View File

@@ -1,129 +0,0 @@
{
"type": "object",
"properties": {
"model": {
"description": "The model that will complete your prompt.\nAs we improve Claude, we develop new versions of it that you can query.\nThis parameter controls which version of Claude answers your request.\nRight now we are offering two model families: Claude, and Claude Instant.\nYou can use them by setting model to \"claude-2\" or \"claude-instant-1\", respectively.\nSee models for additional details.\n",
"x-oaiTypeLabel": "string",
"type": "string",
"enum": [
"claude-2",
"claude-2.0",
"claude-instant-1",
"claude-instant-1.1"
]
},
"prompt": {
"description": "The prompt that you want Claude to complete.\n\nFor proper response generation you will need to format your prompt as follows:\n\\n\\nHuman: ${userQuestion}\\n\\nAssistant:\nSee our comments on prompts for more context.\n",
"default": "<|endoftext|>",
"nullable": true,
"oneOf": [
{
"type": "string",
"default": "",
"example": "This is a test."
},
{
"type": "array",
"items": {
"type": "string",
"default": "",
"example": "This is a test."
}
},
{
"type": "array",
"minItems": 1,
"items": {
"type": "integer"
},
"example": "[1212, 318, 257, 1332, 13]"
},
{
"type": "array",
"minItems": 1,
"items": {
"type": "array",
"minItems": 1,
"items": {
"type": "integer"
}
},
"example": "[[1212, 318, 257, 1332, 13]]"
}
]
},
"max_tokens_to_sample": {
"type": "integer",
"minimum": 1,
"default": 256,
"example": 256,
"nullable": true,
"description": "The maximum number of tokens to generate before stopping.\n\nNote that our models may stop before reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate.\n"
},
"temperature": {
"type": "number",
"minimum": 0,
"maximum": 1,
"default": 1,
"example": 1,
"nullable": true,
"description": "Amount of randomness injected into the response.\n\nDefaults to 1. Ranges from 0 to 1. Use temp closer to 0 for analytical / multiple choice, and closer to 1 for creative and generative tasks.\n"
},
"top_p": {
"type": "number",
"minimum": 0,
"maximum": 1,
"default": 1,
"example": 1,
"nullable": true,
"description": "Use nucleus sampling.\n\nIn nucleus sampling, we compute the cumulative distribution over all the options \nfor each subsequent token in decreasing probability order and cut it off once \nit reaches a particular probability specified by top_p. You should either alter temperature or top_p, but not both.\n"
},
"top_k": {
"type": "number",
"minimum": 0,
"default": 5,
"example": 5,
"nullable": true,
"description": "Only sample from the top K options for each subsequent token.\n\nUsed to remove \"long tail\" low probability responses. Learn more technical details here.\n"
},
"stream": {
"description": "Whether to incrementally stream the response using server-sent events.\nSee this guide to SSE events for details.type: boolean\n",
"nullable": true,
"default": false
},
"stop_sequences": {
"description": "Sequences that will cause the model to stop generating completion text.\nOur models stop on \"\\n\\nHuman:\", and may include additional built-in stop sequences in the future. By providing the stop_sequences parameter, you may include additional strings that will cause the model to stop generating.\n",
"default": null,
"nullable": true,
"oneOf": [
{
"type": "string",
"default": "<|endoftext|>",
"example": "\n",
"nullable": true
},
{
"type": "array",
"minItems": 1,
"maxItems": 4,
"items": {
"type": "string",
"example": "[\"\\n\"]"
}
}
]
},
"metadata": {
"type": "object",
"properties": {
"user_id": {
"type": "string",
"example": "13803d75-b4b5-4c3e-b2a2-6f21399b021b",
"description": "An external identifier for the user who is associated with the request.\n\nThis should be a uuid, hash value, or other opaque identifier. Anthropic may use this id to help detect abuse. \nDo not include any identifying information such as name, email address, or phone number.\n"
}
},
"description": "An object describing metadata about the request.\n"
}
},
"required": ["model", "prompt", "max_tokens_to_sample"]
}

View File

@@ -1,6 +1,6 @@
import openaiChatCompletionFrontend from "./openai-ChatCompletion/frontend";
import replicateLlama2Frontend from "./replicate-llama2/frontend";
import anthropicFrontend from "./anthropic/frontend";
import anthropicFrontend from "./anthropic-completion/frontend";
import { type SupportedProvider, type FrontendModelProvider } from "./types";
// Keep attributes here that need to be accessible from the frontend. We can't
@@ -9,7 +9,7 @@ import { type SupportedProvider, type FrontendModelProvider } from "./types";
const frontendModelProviders: Record<SupportedProvider, FrontendModelProvider<any, any>> = {
"openai/ChatCompletion": openaiChatCompletionFrontend,
"replicate/llama2": replicateLlama2Frontend,
anthropic: anthropicFrontend,
"anthropic/completion": anthropicFrontend,
};
export default frontendModelProviders;

View File

@@ -1,12 +1,12 @@
import openaiChatCompletion from "./openai-ChatCompletion";
import replicateLlama2 from "./replicate-llama2";
import anthropic from "./anthropic";
import anthropicCompletion from "./anthropic-completion";
import { type SupportedProvider, type ModelProvider } from "./types";
const modelProviders: Record<SupportedProvider, ModelProvider<any, any, any>> = {
"openai/ChatCompletion": openaiChatCompletion,
"replicate/llama2": replicateLlama2,
anthropic,
"anthropic/completion": anthropicCompletion,
};
export default modelProviders;

View File

@@ -6,7 +6,7 @@ import {
} from "openai/resources/chat";
import { countOpenAIChatTokens } from "~/utils/countTokens";
import { type CompletionResponse } from "../types";
import { omit } from "lodash-es";
import { isArray, isString, omit } from "lodash-es";
import { openai } from "~/server/utils/openai";
import { truthyFilter } from "~/utils/utils";
import { APIError } from "openai";
@@ -40,6 +40,8 @@ const mergeStreamedChunks = (
((choice.delta.function_call.arguments as string) ?? "");
}
} else {
// @ts-expect-error the types are correctly telling us that finish_reason
// could be null, but don't want to fix it right now.
choices.push({ ...omit(choice, "delta"), message: { role: "assistant", ...choice.delta } });
}
}
@@ -64,6 +66,7 @@ export async function getCompletion(
try {
if (onStream) {
console.log("got started");
const resp = await openai.chat.completions.create(
{ ...input, stream: true },
{
@@ -71,9 +74,11 @@ export async function getCompletion(
},
);
for await (const part of resp) {
console.log("got part", part);
finalCompletion = mergeStreamedChunks(finalCompletion, part);
onStream(finalCompletion);
}
console.log("got final", finalCompletion);
if (!finalCompletion) {
return {
type: "error",
@@ -121,9 +126,17 @@ export async function getCompletion(
};
} catch (error: unknown) {
if (error instanceof APIError) {
// The types from the sdk are wrong
const rawMessage = error.message as string | string[];
// If the message is not a string, stringify it
const message = isString(rawMessage)
? rawMessage
: isArray(rawMessage)
? rawMessage.map((m) => m.toString()).join("\n")
: (rawMessage as any).toString();
return {
type: "error",
message: error.message,
message,
autoRetry: error.status === 429 || error.status === 503,
statusCode: error.status,
};
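mergeStreamedChunks (partially shown above) folds each streamed delta into an accumulating completion. A reduced sketch of the core merge step, assuming OpenAI-style chunks where each choice carries an optional content delta:

type StreamChoice = { index: number; delta: { content?: string } };

// Append each chunk's content delta to the accumulated text per choice index.
function mergeContentDelta(
  accumulated: Map<number, string>,
  chunk: { choices: StreamChoice[] },
): Map<number, string> {
  for (const choice of chunk.choices) {
    const prev = accumulated.get(choice.index) ?? "";
    accumulated.set(choice.index, prev + (choice.delta.content ?? ""));
  }
  return accumulated;
}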

View File

@@ -8,8 +8,8 @@ const replicate = new Replicate({
});
const modelIds: Record<ReplicateLlama2Input["model"], string> = {
"7b-chat": "058333670f2a6e88cf1b29b8183405b17bb997767282f790b82137df8c090c1f",
"13b-chat": "d5da4236b006f967ceb7da037be9cfc3924b20d21fed88e1e94f19d56e2d3111",
"7b-chat": "4f0b260b6a13eb53a6b1891f089d57c08f41003ae79458be5011303d81a394dc",
"13b-chat": "2a7f981751ec7fdf87b5b91ad4db53683a98082e9ff7bfd12c8cd5ea85980a52",
"70b-chat": "2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1",
};

View File

@@ -54,7 +54,7 @@ const modelProvider: ReplicateLlama2Provider = {
temperature: {
type: "number",
description:
"Adjusts randomness of outputs, greater than 1 is random and 0 is deterministic, 0.75 is a good starting value. (minimum: 0.01; maximum: 5)",
"Adjusts randomness of outputs, 0.1 is a good starting value. (minimum: 0.01; maximum: 5)",
},
top_p: {
type: "number",

View File

@@ -6,7 +6,7 @@ import { z } from "zod";
export const ZodSupportedProvider = z.union([
z.literal("openai/ChatCompletion"),
z.literal("replicate/llama2"),
z.literal("anthropic"),
z.literal("anthropic/completion"),
]);
export type SupportedProvider = z.infer<typeof ZodSupportedProvider>;
@@ -21,6 +21,7 @@ export type Model = {
provider: SupportedProvider;
description?: string;
learnMoreUrl?: string;
apiDocsUrl?: string;
};
export type ProviderModel = { provider: z.infer<typeof ZodSupportedProvider>; model: string };

View File

@@ -3,12 +3,12 @@ import { SessionProvider } from "next-auth/react";
import { type AppType } from "next/app";
import { api } from "~/utils/api";
import Favicon from "~/components/Favicon";
import "~/utils/analytics";
import Head from "next/head";
import { ChakraThemeProvider } from "~/theme/ChakraThemeProvider";
import { SyncAppStore } from "~/state/sync";
import NextAdapterApp from "next-query-params/app";
import { QueryParamProvider } from "use-query-params";
import { SessionIdentifier } from "~/utils/analytics/clientAnalytics";
const MyApp: AppType<{ session: Session | null }> = ({
Component,
@@ -36,6 +36,7 @@ const MyApp: AppType<{ session: Session | null }> = ({
<SessionProvider session={session}>
<SyncAppStore />
<Favicon />
<SessionIdentifier />
<ChakraThemeProvider>
<QueryParamProvider adapter={NextAdapterApp}>
<Component {...pageProps} />

View File

@@ -9,7 +9,7 @@ const inconsolataRegularFontP = fetch(
new URL("../../../../public/fonts/Inconsolata_SemiExpanded-Medium.ttf", import.meta.url),
).then((res) => res.arrayBuffer());
const OgImage = async (req: NextApiRequest, res: NextApiResponse) => {
const OgImage = async (req: NextApiRequest, _res: NextApiResponse) => {
// @ts-expect-error - nextUrl is not defined on NextApiRequest for some reason
const searchParams = req.nextUrl?.searchParams as URLSearchParams;
const experimentLabel = searchParams.get("experimentLabel");

View File

@@ -0,0 +1,6 @@
// A faulty API route to test Sentry's error monitoring
// @ts-expect-error just a test file, don't care about types
export default function handler(_req, res) {
throw new Error("Sentry Example API Route Error");
res.status(200).json({ name: "John Doe" });
}

src/pages/data/[id].tsx Normal file
View File

@@ -0,0 +1,99 @@
import {
Box,
Breadcrumb,
BreadcrumbItem,
Center,
Flex,
Icon,
Input,
VStack,
} from "@chakra-ui/react";
import Link from "next/link";
import { useRouter } from "next/router";
import { useState, useEffect } from "react";
import { RiDatabase2Line } from "react-icons/ri";
import AppShell from "~/components/nav/AppShell";
import { api } from "~/utils/api";
import { useDataset, useHandledAsyncCallback } from "~/utils/hooks";
import DatasetEntriesTable from "~/components/datasets/DatasetEntriesTable";
import { DatasetHeaderButtons } from "~/components/datasets/DatasetHeaderButtons/DatasetHeaderButtons";
export default function Dataset() {
const router = useRouter();
const utils = api.useContext();
const dataset = useDataset();
const datasetId = router.query.id as string;
const [name, setName] = useState(dataset.data?.name || "");
useEffect(() => {
setName(dataset.data?.name || "");
}, [dataset.data?.name]);
const updateMutation = api.datasets.update.useMutation();
const [onSaveName] = useHandledAsyncCallback(async () => {
if (name && name !== dataset.data?.name && dataset.data?.id) {
await updateMutation.mutateAsync({
id: dataset.data.id,
updates: { name: name },
});
await Promise.all([utils.datasets.list.invalidate(), utils.datasets.get.invalidate()]);
}
}, [updateMutation, dataset.data?.id, dataset.data?.name, name]);
if (!dataset.isLoading && !dataset.data) {
return (
<AppShell title="Dataset not found">
<Center h="100%">
<div>Dataset not found 😕</div>
</Center>
</AppShell>
);
}
return (
<AppShell title={dataset.data?.name}>
<VStack h="full">
<Flex
pl={4}
pr={8}
py={2}
w="full"
direction={{ base: "column", sm: "row" }}
alignItems={{ base: "flex-start", sm: "center" }}
>
<Breadcrumb flex={1} mt={1}>
<BreadcrumbItem>
<Link href="/data">
<Flex alignItems="center" _hover={{ textDecoration: "underline" }}>
<Icon as={RiDatabase2Line} boxSize={4} mr={2} /> Datasets
</Flex>
</Link>
</BreadcrumbItem>
<BreadcrumbItem isCurrentPage>
<Input
size="sm"
value={name}
onChange={(e) => setName(e.target.value)}
onBlur={onSaveName}
borderWidth={1}
borderColor="transparent"
fontSize={16}
px={0}
minW={{ base: 100, lg: 300 }}
flex={1}
_hover={{ borderColor: "gray.300" }}
_focus={{ borderColor: "blue.500", outline: "none" }}
/>
</BreadcrumbItem>
</Breadcrumb>
<DatasetHeaderButtons />
</Flex>
<Box w="full" overflowX="auto" flex={1} pl={4} pr={8} pt={8} pb={16}>
{datasetId && <DatasetEntriesTable />}
</Box>
</VStack>
</AppShell>
);
}

src/pages/data/index.tsx Normal file
View File

@@ -0,0 +1,83 @@
import {
SimpleGrid,
Icon,
VStack,
Breadcrumb,
BreadcrumbItem,
Flex,
Center,
Text,
Link,
HStack,
} from "@chakra-ui/react";
import AppShell from "~/components/nav/AppShell";
import { api } from "~/utils/api";
import { signIn, useSession } from "next-auth/react";
import { RiDatabase2Line } from "react-icons/ri";
import {
DatasetCard,
DatasetCardSkeleton,
NewDatasetCard,
} from "~/components/datasets/DatasetCard";
export default function DatasetsPage() {
const datasets = api.datasets.list.useQuery();
const user = useSession().data;
const authLoading = useSession().status === "loading";
if (user === null || authLoading) {
return (
<AppShell title="Data">
<Center h="100%">
{!authLoading && (
<Text>
<Link
onClick={() => {
signIn("github").catch(console.error);
}}
textDecor="underline"
>
Sign in
</Link>{" "}
to view or create new datasets!
</Text>
)}
</Center>
</AppShell>
);
}
return (
<AppShell title="Data">
<VStack alignItems={"flex-start"} px={4} py={2}>
<HStack minH={8} align="center" pt={2}>
<Breadcrumb flex={1}>
<BreadcrumbItem>
<Flex alignItems="center">
<Icon as={RiDatabase2Line} boxSize={4} mr={2} /> Datasets
</Flex>
</BreadcrumbItem>
</Breadcrumb>
</HStack>
<SimpleGrid w="full" columns={{ base: 1, md: 2, lg: 3, xl: 4 }} spacing={8} p="4">
<NewDatasetCard />
{datasets.data && !datasets.isLoading ? (
datasets?.data?.map((dataset) => (
<DatasetCard
key={dataset.id}
dataset={{ ...dataset, numEntries: dataset._count.datasetEntries }}
/>
))
) : (
<>
<DatasetCardSkeleton />
<DatasetCardSkeleton />
<DatasetCardSkeleton />
</>
)}
</SimpleGrid>
</VStack>
</AppShell>
);
}

View File

@@ -21,7 +21,7 @@ import { api } from "~/utils/api";
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
import { useAppStore } from "~/state/store";
import { useSyncVariantEditor } from "~/state/sync";
import { HeaderButtons } from "~/components/experiments/HeaderButtons/HeaderButtons";
import { ExperimentHeaderButtons } from "~/components/experiments/ExperimentHeaderButtons/ExperimentHeaderButtons";
import Head from "next/head";
// TODO: import less to fix deployment with server side props
@@ -142,7 +142,7 @@ export default function Experiment() {
)}
</BreadcrumbItem>
</Breadcrumb>
<HeaderButtons />
<ExperimentHeaderButtons />
</Flex>
<ExperimentSettingsDrawer />
<Box w="100%" overflowX="auto" flex={1}>

View File

@@ -0,0 +1,84 @@
import Head from "next/head";
import * as Sentry from "@sentry/nextjs";
export default function Home() {
return (
<div>
<Head>
<title>Sentry Onboarding</title>
<meta name="description" content="Test Sentry for your Next.js app!" />
</Head>
<main
style={{
minHeight: "100vh",
display: "flex",
flexDirection: "column",
justifyContent: "center",
alignItems: "center",
}}
>
<h1 style={{ fontSize: "4rem", margin: "14px 0" }}>
<svg
style={{
height: "1em",
}}
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 200 44"
>
<path
fill="currentColor"
d="M124.32,28.28,109.56,9.22h-3.68V34.77h3.73V15.19l15.18,19.58h3.26V9.22h-3.73ZM87.15,23.54h13.23V20.22H87.14V12.53h14.93V9.21H83.34V34.77h18.92V31.45H87.14ZM71.59,20.3h0C66.44,19.06,65,18.08,65,15.7c0-2.14,1.89-3.59,4.71-3.59a12.06,12.06,0,0,1,7.07,2.55l2-2.83a14.1,14.1,0,0,0-9-3c-5.06,0-8.59,3-8.59,7.27,0,4.6,3,6.19,8.46,7.52C74.51,24.74,76,25.78,76,28.11s-2,3.77-5.09,3.77a12.34,12.34,0,0,1-8.3-3.26l-2.25,2.69a15.94,15.94,0,0,0,10.42,3.85c5.48,0,9-2.95,9-7.51C79.75,23.79,77.47,21.72,71.59,20.3ZM195.7,9.22l-7.69,12-7.64-12h-4.46L186,24.67V34.78h3.84V24.55L200,9.22Zm-64.63,3.46h8.37v22.1h3.84V12.68h8.37V9.22H131.08ZM169.41,24.8c3.86-1.07,6-3.77,6-7.63,0-4.91-3.59-8-9.38-8H154.67V34.76h3.8V25.58h6.45l6.48,9.2h4.44l-7-9.82Zm-10.95-2.5V12.6h7.17c3.74,0,5.88,1.77,5.88,4.84s-2.29,4.86-5.84,4.86Z M29,2.26a4.67,4.67,0,0,0-8,0L14.42,13.53A32.21,32.21,0,0,1,32.17,40.19H27.55A27.68,27.68,0,0,0,12.09,17.47L6,28a15.92,15.92,0,0,1,9.23,12.17H4.62A.76.76,0,0,1,4,39.06l2.94-5a10.74,10.74,0,0,0-3.36-1.9l-2.91,5a4.54,4.54,0,0,0,1.69,6.24A4.66,4.66,0,0,0,4.62,44H19.15a19.4,19.4,0,0,0-8-17.31l2.31-4A23.87,23.87,0,0,1,23.76,44H36.07a35.88,35.88,0,0,0-16.41-31.8l4.67-8a.77.77,0,0,1,1.05-.27c.53.29,20.29,34.77,20.66,35.17a.76.76,0,0,1-.68,1.13H40.6q.09,1.91,0,3.81h4.78A4.59,4.59,0,0,0,50,39.43a4.49,4.49,0,0,0-.62-2.28Z"
></path>
</svg>
</h1>
<p>Get started by sending us a sample error:</p>
<button
type="button"
style={{
padding: "12px",
cursor: "pointer",
backgroundColor: "#AD6CAA",
borderRadius: "4px",
border: "none",
color: "white",
fontSize: "14px",
margin: "18px",
}}
onClick={async () => {
const transaction = Sentry.startTransaction({
name: "Example Frontend Transaction",
});
Sentry.configureScope((scope) => {
scope.setSpan(transaction);
});
try {
const res = await fetch("/api/sentry-example-api");
if (!res.ok) {
throw new Error("Sentry Example Frontend Error");
}
} finally {
transaction.finish();
}
}}
>
Throw error!
</button>
<p>
Next, look for the error on the{" "}
<a href="https://openpipe.sentry.io/issues/?project=4505642011394048">Issues Page</a>.
</p>
<p style={{ marginTop: "24px" }}>
For more information, see{" "}
<a href="https://docs.sentry.io/platforms/javascript/guides/nextjs/">
https://docs.sentry.io/platforms/javascript/guides/nextjs/
</a>
</p>
</main>
</div>
);
}

View File

@@ -18,6 +18,7 @@ import {
VStack,
useInterval,
Image,
Flex,
} from "@chakra-ui/react";
import { signIn, useSession } from "next-auth/react";
import Head from "next/head";
@@ -27,24 +28,45 @@ import UserMenu from "~/components/nav/UserMenu";
import { api } from "~/utils/api";
import dayjs from "~/utils/dayjs";
import { useHandledAsyncCallback } from "~/utils/hooks";
import GitHubButton from "react-github-btn";
const TopNavbar = () => (
<DarkMode>
<GlobalStyle />
<HStack px={4} py={2}>
<HStack as={Link} href="/" _hover={{ textDecoration: "none" }} spacing={0} py={2} pr={16}>
<Image src="/logo.svg" alt="" boxSize={6} mr={4} />
<Heading size="md" fontFamily="inconsolata, monospace">
OpenPipe
</Heading>
</HStack>
<HStack px={4} py={2} align="center" justify="center">
<HStack
as={Link}
href="/"
_hover={{ textDecoration: "none" }}
spacing={0}
py={2}
pr={16}
flex={1}
sx={{
".widget": {
display: "block",
},
}}
>
<Image src="/logo.svg" alt="" boxSize={6} mr={4} />
<Heading size="md" fontFamily="inconsolata, monospace">
OpenPipe
</Heading>
</HStack>
</DarkMode>
<Box pt="6px">
<GitHubButton
href="https://github.com/openpipe/openpipe"
data-color-scheme="no-preference: dark; light: dark; dark: dark;"
data-size="large"
aria-label="Follow @openpipe on GitHub"
>
Github
</GitHubButton>
</Box>
</HStack>
);
// Shows how long until the competition starts. Refreshes every second
function CountdownTimer(props: { date: Date } & TextProps) {
const [now, setNow] = useState(dayjs(0));
const [now, setNow] = useState(dayjs());
useInterval(() => {
setNow(dayjs());
@@ -52,7 +74,7 @@ function CountdownTimer(props: { date: Date } & TextProps) {
const { date, ...rest } = props;
const kickoff = dayjs(props.date);
const kickoff = dayjs(date);
const diff = kickoff.diff(now, "second");
const days = Math.floor(diff / 86400);
const hours = Math.floor((diff % 86400) / 3600);
@@ -60,7 +82,7 @@ function CountdownTimer(props: { date: Date } & TextProps) {
const seconds = Math.floor(diff % 60);
return (
<Text {...rest}>
<Text {...rest} suppressHydrationWarning>
<Text as="span" fontWeight="bold">
Kickoff in
</Text>{" "}
@@ -103,8 +125,16 @@ function ApplicationStatus(props: BoxProps) {
} else if (user) {
return (
<Wrapper>
<HStack spacing={8}>
<UserMenu user={user} borderRadius={2} borderColor={"gray.700"} borderWidth={1} pr={6} />
<Flex flexDirection={{ base: "column", md: "row" }} alignItems="center">
<UserMenu
user={user}
borderRadius={2}
borderColor={"gray.700"}
borderWidth={1}
pr={6}
mr={{ base: 0, md: 8 }}
mb={{ base: 8, md: 0 }}
/>
<Box flex={1}>
{entrant?.approved ? (
<Text fontSize="sm">
@@ -112,7 +142,16 @@ function ApplicationStatus(props: BoxProps) {
</Text>
) : entrant ? (
<Text fontSize="sm">
Application submitted successfully! We'll notify you by email before August 14th.
Application submitted successfully. We'll notify you by email before August 14th.{" "}
<Link
href="https://github.com/openpipe/openpipe"
isExternal
textDecor="underline"
fontWeight="bold"
>
Star our Github ⭐
</Link>{" "}
for updates while you wait!
</Text>
) : (
<Button onClick={onApply} colorScheme="orange">
@@ -120,7 +159,7 @@ function ApplicationStatus(props: BoxProps) {
</Button>
)}
</Box>
</HStack>
</Flex>
</Wrapper>
);
}
@@ -143,10 +182,12 @@ export default function Signup() {
/>
</Head>
<Box bgColor="gray.900" color="gray.200" minH="100vh" w="full">
<Box color="gray.200" minH="100vh" w="full">
<TopNavbar />
<VStack mx="auto" py={24} maxW="2xl" align="start" fontSize="lg">
<Heading size="lg">🏆 Prompt Engineering World Championships</Heading>
<VStack mx="auto" py={24} maxW="2xl" px={4} align="center" fontSize="lg">
<Heading size="lg" textAlign="center">
🏆 Prompt Engineering World Championships
</Heading>
<CountdownTimer
date={new Date("2023-08-14T00:00:00Z")}
fontSize="2xl"
@@ -156,7 +197,7 @@ export default function Signup() {
<ApplicationStatus py={8} alignSelf="center" />
<Text fontSize="lg">
<Text fontSize="lg" textAlign="left">
Think you have what it takes to be the best? Compete with the world's top prompt
engineers and see where you rank!
</Text>
@@ -165,7 +206,14 @@ export default function Signup() {
Event Details
</Heading>
<Table variant="simple">
<Tbody>
<Tbody
sx={{
th: {
base: { px: 0 },
md: { px: 6 },
},
}}
>
<Tr>
<Th>Kickoff</Th>
<Td>August 14</Td>

View File

@@ -1,5 +1,5 @@
import { expect, test } from "vitest";
import { stripTypes } from "./formatPromptConstructor";
import { stripTypes } from "./format";
test("stripTypes", () => {
expect(stripTypes(`const foo: string = "bar";`)).toBe(`const foo = "bar";`);

View File

@@ -1,10 +1,10 @@
import "dotenv/config";
import dedent from "dedent";
import { expect, test } from "vitest";
import { migrate1to2 } from "./migrateConstructFns";
import { migrate1to2, migrate2to3 } from "./migrate";
test("migrate1to2", () => {
const constructFn = dedent`
const promptConstructor = dedent`
// Test comment
prompt = {
@@ -18,7 +18,7 @@ test("migrate1to2", () => {
}
`;
const migrated = migrate1to2(constructFn);
const migrated = migrate1to2(promptConstructor);
expect(migrated).toBe(dedent`
// Test comment
@@ -32,14 +32,25 @@ test("migrate1to2", () => {
]
})
`);
// console.log(
// migrateConstructFn(dedent`definePrompt(
// "openai/ChatCompletion",
// {
// model: 'gpt-3.5-turbo-0613',
// messages: []
// }
// )`),
// );
});
test("migrate2to3", () => {
const promptConstructor = dedent`
// Test comment
definePrompt("anthropic", {
model: "claude-2.0",
prompt: "What is the capital of China?"
})
`;
const migrated = migrate2to3(promptConstructor);
expect(migrated).toBe(dedent`
// Test comment
definePrompt("anthropic/completion", {
model: "claude-2.0",
prompt: "What is the capital of China?"
})
`);
});

View File

@@ -0,0 +1,125 @@
import "dotenv/config";
import * as recast from "recast";
import { type ASTNode } from "ast-types";
import { fileURLToPath } from "url";
import parsePromptConstructor from "./parse";
import { prisma } from "~/server/db";
import { promptConstructorVersion } from "./version";
const { builders: b } = recast.types;
export const migrate1to2 = (fnBody: string): string => {
const ast: ASTNode = recast.parse(fnBody);
recast.visit(ast, {
visitAssignmentExpression(path) {
const node = path.node;
if ("name" in node.left && node.left.name === "prompt") {
const functionCall = b.callExpression(b.identifier("definePrompt"), [
b.literal("openai/ChatCompletion"),
node.right,
]);
path.replace(functionCall);
}
return false;
},
});
return recast.print(ast).code;
};
export const migrate2to3 = (fnBody: string): string => {
const ast: ASTNode = recast.parse(fnBody);
recast.visit(ast, {
visitCallExpression(path) {
const node = path.node;
// Check if the function being called is 'definePrompt'
if (
recast.types.namedTypes.Identifier.check(node.callee) &&
node.callee.name === "definePrompt" &&
node.arguments.length > 0 &&
recast.types.namedTypes.Literal.check(node.arguments[0]) &&
node.arguments[0].value === "anthropic"
) {
node.arguments[0].value = "anthropic/completion";
}
return false;
},
});
return recast.print(ast).code;
};
const migrations: Record<number, (fnBody: string) => string> = {
2: migrate1to2,
3: migrate2to3,
};
const applyMigrations = (
promptConstructor: string,
currentVersion: number,
targetVersion: number,
) => {
let migratedFn = promptConstructor;
for (let v = currentVersion + 1; v <= targetVersion; v++) {
const migrationFn = migrations[v];
if (migrationFn) {
migratedFn = migrationFn(migratedFn);
}
}
return migratedFn;
};
export default async function migrateConstructFns(targetVersion: number) {
const prompts = await prisma.promptVariant.findMany({
where: { promptConstructorVersion: { lt: targetVersion } },
});
console.log(`Migrating ${prompts.length} prompts to version ${targetVersion}`);
await Promise.all(
prompts.map(async (variant) => {
const currentVersion = variant.promptConstructorVersion;
try {
const migratedFn = applyMigrations(
variant.promptConstructor,
currentVersion,
targetVersion,
);
const parsedFn = await parsePromptConstructor(migratedFn);
if ("error" in parsedFn) {
throw new Error(parsedFn.error);
}
await prisma.promptVariant.update({
where: {
id: variant.id,
},
data: {
promptConstructor: migratedFn,
promptConstructorVersion: targetVersion,
modelProvider: parsedFn.modelProvider,
model: parsedFn.model,
},
});
} catch (e) {
console.error("Error migrating promptConstructor for variant", variant.id, e);
}
}),
);
}
// If we're running this file directly, run the migration to the latest version
if (process.argv.at(-1) === fileURLToPath(import.meta.url)) {
const latestVersion = Math.max(...Object.keys(migrations).map(Number));
if (latestVersion !== promptConstructorVersion) {
throw new Error(
`The latest migration is ${latestVersion}, but the promptConstructorVersion is ${promptConstructorVersion}`,
);
}
await migrateConstructFns(promptConstructorVersion);
console.log("Done");
}
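Note the keying convention in the migrations map: the key is the version a migration produces, so taking a version-1 prompt to version 3 runs migrations[2] (migrate1to2) and then migrations[3] (migrate2to3). Consistent with the test earlier in this diff, migrate2to3 only rewrites the provider literal:

import { migrate2to3 } from "./migrate";

const v2 = `definePrompt("anthropic", { model: "claude-2.0", prompt: "Hi" })`;
console.log(migrate2to3(v2));
// => definePrompt("anthropic/completion", { model: "claude-2.0", prompt: "Hi" })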

View File

@@ -1,11 +1,11 @@
import { expect, test } from "vitest";
import parseConstructFn from "./parseConstructFn";
import parsePromptConstructor from "./parse";
import assert from "assert";
// Note: this has to be run with `vitest --no-threads` option or else
// isolated-vm seems to throw errors
test("parseConstructFn", async () => {
const constructed = await parseConstructFn(
test("parsePromptConstructor", async () => {
const constructed = await parsePromptConstructor(
`
// These sometimes have a comment
@@ -38,7 +38,7 @@ test("parseConstructFn", async () => {
});
test("bad syntax", async () => {
const parsed = await parseConstructFn(`definePrompt("openai/ChatCompletion", {`);
const parsed = await parsePromptConstructor(`definePrompt("openai/ChatCompletion", {`);
assert("error" in parsed);
expect(parsed.error).toContain("Unexpected end of input");

View File

@@ -4,7 +4,7 @@ import { isObject, isString } from "lodash-es";
import { type JsonObject } from "type-fest";
import { validate } from "jsonschema";
export type ParsedConstructFn<T extends keyof typeof modelProviders> = {
export type ParsedPromptConstructor<T extends keyof typeof modelProviders> = {
modelProvider: T;
model: keyof (typeof modelProviders)[T]["models"];
modelInput: Parameters<(typeof modelProviders)[T]["getModel"]>[0];
@@ -12,12 +12,12 @@ export type ParsedConstructFn<T extends keyof typeof modelProviders> = {
const isolate = new ivm.Isolate({ memoryLimit: 128 });
export default async function parseConstructFn(
constructFn: string,
export default async function parsePromptConstructor(
promptConstructor: string,
scenario: JsonObject | undefined = {},
): Promise<ParsedConstructFn<keyof typeof modelProviders> | { error: string }> {
): Promise<ParsedPromptConstructor<keyof typeof modelProviders> | { error: string }> {
try {
const modifiedConstructFn = constructFn.replace(
const modifiedConstructFn = promptConstructor.replace(
"definePrompt(",
"global.prompt = definePrompt(",
);
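The replace() above works because the sandboxed script has no return value of its own: assigning to global.prompt lets the host read the result back out of the isolate. A minimal sketch of that round trip (assuming isolated-vm's Context and Reference APIs; definePrompt is stubbed here, while the real implementation validates the provider and input):

import ivm from "isolated-vm";

async function evalInSandbox(promptConstructor: string): Promise<unknown> {
  const isolate = new ivm.Isolate({ memoryLimit: 128 });
  const context = await isolate.createContext();
  // Expose the sandbox's global object to scripts under the name "global".
  await context.global.set("global", context.global.derefInto());
  // Stub: just hand back the input object.
  await context.eval(`const definePrompt = (_provider, input) => input;`);
  const rewritten = promptConstructor.replace("definePrompt(", "global.prompt = definePrompt(");
  await context.eval(rewritten);
  // Copy the result out of the isolate rather than holding a live reference.
  return context.global.get("prompt", { copy: true });
}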

View File

@@ -0,0 +1 @@
export const promptConstructorVersion = 3;

View File

@@ -0,0 +1,108 @@
import { type ChatCompletion } from "openai/resources/chat";
import { openai } from "../../utils/openai";
import { isAxiosError } from "./utils";
import { type APIResponse } from "openai/core";
import { sleep } from "~/server/utils/sleep";
const MAX_AUTO_RETRIES = 50;
const MIN_DELAY = 500; // milliseconds
const MAX_DELAY = 15000; // milliseconds
function calculateDelay(numPreviousTries: number): number {
const baseDelay = Math.min(MAX_DELAY, MIN_DELAY * Math.pow(2, numPreviousTries));
const jitter = Math.random() * baseDelay;
return baseDelay + jitter;
}
const getCompletionWithBackoff = async (
getCompletion: () => Promise<APIResponse<ChatCompletion>>,
) => {
let completion;
let tries = 0;
while (tries < MAX_AUTO_RETRIES) {
try {
completion = await getCompletion();
break;
} catch (e) {
if (isAxiosError(e)) {
console.error(e?.response?.data?.error?.message);
} else {
await sleep(calculateDelay(tries));
console.error(e);
}
}
tries++;
}
return completion;
};
// TODO: Add seeds to ensure batches don't contain duplicate data
const MAX_BATCH_SIZE = 5;
export const autogenerateDatasetEntries = async (
numToGenerate: number,
inputDescription: string,
outputDescription: string,
): Promise<{ input: string; output: string }[]> => {
const batchSizes = Array.from({ length: Math.ceil(numToGenerate / MAX_BATCH_SIZE) }, (_, i) =>
i === Math.ceil(numToGenerate / MAX_BATCH_SIZE) - 1 && numToGenerate % MAX_BATCH_SIZE
? numToGenerate % MAX_BATCH_SIZE
: MAX_BATCH_SIZE,
);
const getCompletion = (batchSize: number) =>
openai.chat.completions.create({
model: "gpt-4",
messages: [
{
role: "system",
content: `The user needs ${batchSize} rows of data, each with an input and an output.\n---\n The input should follow these requirements: ${inputDescription}\n---\n The output should follow these requirements: ${outputDescription}`,
},
],
functions: [
{
name: "add_list_of_data",
description: "Add a list of data to the database",
parameters: {
type: "object",
properties: {
rows: {
type: "array",
description: "The rows of data that match the description",
items: {
type: "object",
properties: {
input: {
type: "string",
description: "The input for this row",
},
output: {
type: "string",
description: "The output for this row",
},
},
},
},
},
},
},
],
function_call: { name: "add_list_of_data" },
temperature: 0.5,
});
const completionCallbacks = batchSizes.map((batchSize) =>
getCompletionWithBackoff(() => getCompletion(batchSize)),
);
const completions = await Promise.all(completionCallbacks);
const rows = completions.flatMap((completion) => {
const parsed = JSON.parse(
completion?.choices[0]?.message?.function_call?.arguments ?? "{rows: []}",
) as { rows: { input: string; output: string }[] };
return parsed.rows;
});
return rows;
};
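Two details above are easy to misread: calculateDelay doubles the base delay per retry and then adds up to 100% jitter on top, and batchSizes puts the remainder in the final chunk. A worked example of both (values only, not repo code):

// calculateDelay with MIN_DELAY = 500, MAX_DELAY = 15000:
//   tries = 0 -> base 500 ms,  actual wait in [500, 1000) ms
//   tries = 3 -> base 4000 ms, actual wait in [4000, 8000) ms
//   tries = 5 -> base capped at 15000 ms, wait in [15000, 30000) ms

// batchSizes with numToGenerate = 12, MAX_BATCH_SIZE = 5:
const numToGenerate = 12;
const batchSizes = Array.from({ length: Math.ceil(numToGenerate / 5) }, (_, i) =>
  i === Math.ceil(numToGenerate / 5) - 1 && numToGenerate % 5 ? numToGenerate % 5 : 5,
);
// batchSizes === [5, 5, 2]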

View File

@@ -1,26 +1,9 @@
import { type CompletionCreateParams } from "openai/resources/chat";
import { prisma } from "../db";
import { openai } from "../utils/openai";
import { prisma } from "../../db";
import { openai } from "../../utils/openai";
import { pick } from "lodash-es";
import { isAxiosError } from "./utils";
type AxiosError = {
response?: {
data?: {
error?: {
message?: string;
};
};
};
};
function isAxiosError(error: unknown): error is AxiosError {
if (typeof error === "object" && error !== null) {
// Initial check
const err = error as AxiosError;
return err.response?.data?.error?.message !== undefined; // Check structure
}
return false;
}
export const autogenerateScenarioValues = async (
experimentId: string,
): Promise<Record<string, string>> => {
@@ -68,7 +51,7 @@ export const autogenerateScenarioValues = async (
messages.push({
role: "user",
content: `Prompt constructor function:\n---\n${prompt.constructFn}`,
content: `Prompt constructor function:\n---\n${prompt.promptConstructor}`,
});
existingScenarios

View File

@@ -0,0 +1,18 @@
type AxiosError = {
response?: {
data?: {
error?: {
message?: string;
};
};
};
};
export function isAxiosError(error: unknown): error is AxiosError {
if (typeof error === "object" && error !== null) {
// Initial check
const err = error as AxiosError;
return err.response?.data?.error?.message !== undefined; // Check structure
}
return false;
}
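Extracting the guard into a shared module lets both autogenerate helpers narrow unknown errors before touching nested fields. A short usage sketch:

import { isAxiosError } from "./utils";

async function safeRequest(doRequest: () => Promise<void>) {
  try {
    await doRequest();
  } catch (e: unknown) {
    if (isAxiosError(e)) {
      // Narrowed: response.data.error.message is now a typed optional string.
      console.error(e.response?.data?.error?.message);
    } else {
      throw e;
    }
  }
}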

View File

@@ -6,6 +6,8 @@ import { scenarioVariantCellsRouter } from "./routers/scenarioVariantCells.route
import { templateVarsRouter } from "./routers/templateVariables.router";
import { evaluationsRouter } from "./routers/evaluations.router";
import { worldChampsRouter } from "./routers/worldChamps.router";
import { datasetsRouter } from "./routers/datasets.router";
import { datasetEntries } from "./routers/datasetEntries.router";
/**
* This is the primary router for your server.
@@ -20,6 +22,8 @@ export const appRouter = createTRPCRouter({
templateVars: templateVarsRouter,
evaluations: evaluationsRouter,
worldChamps: worldChampsRouter,
datasets: datasetsRouter,
datasetEntries: datasetEntries,
});
// export type definition of API

View File

@@ -0,0 +1,149 @@
import { z } from "zod";
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { prisma } from "~/server/db";
import { requireCanModifyDataset, requireCanViewDataset } from "~/utils/accessControl";
import { autogenerateDatasetEntries } from "../autogenerate/autogenerateDatasetEntries";
const PAGE_SIZE = 10;
export const datasetEntries = createTRPCRouter({
list: protectedProcedure
.input(z.object({ datasetId: z.string(), page: z.number() }))
.query(async ({ input, ctx }) => {
await requireCanViewDataset(input.datasetId, ctx);
const { datasetId, page } = input;
const entries = await prisma.datasetEntry.findMany({
where: {
datasetId,
},
orderBy: { createdAt: "desc" },
skip: (page - 1) * PAGE_SIZE,
take: PAGE_SIZE,
});
const count = await prisma.datasetEntry.count({
where: {
datasetId,
},
});
return {
entries,
startIndex: (page - 1) * PAGE_SIZE + 1,
lastPage: Math.ceil(count / PAGE_SIZE),
count,
};
}),
createOne: protectedProcedure
.input(
z.object({
datasetId: z.string(),
input: z.string(),
output: z.string().optional(),
}),
)
.mutation(async ({ input, ctx }) => {
await requireCanModifyDataset(input.datasetId, ctx);
return await prisma.datasetEntry.create({
data: {
datasetId: input.datasetId,
input: input.input,
output: input.output,
},
});
}),
autogenerateEntries: protectedProcedure
.input(
z.object({
datasetId: z.string(),
numToGenerate: z.number(),
inputDescription: z.string(),
outputDescription: z.string(),
}),
)
.mutation(async ({ input, ctx }) => {
await requireCanModifyDataset(input.datasetId, ctx);
const dataset = await prisma.dataset.findUnique({
where: {
id: input.datasetId,
},
});
if (!dataset) {
throw new Error(`Dataset with id ${input.datasetId} does not exist`);
}
const entries = await autogenerateDatasetEntries(
input.numToGenerate,
input.inputDescription,
input.outputDescription,
);
const createdEntries = await prisma.datasetEntry.createMany({
data: entries.map((entry) => ({
datasetId: input.datasetId,
input: entry.input,
output: entry.output,
})),
});
return createdEntries;
}),
delete: protectedProcedure
.input(z.object({ id: z.string() }))
.mutation(async ({ input, ctx }) => {
const datasetId = (
await prisma.datasetEntry.findUniqueOrThrow({
where: { id: input.id },
})
).datasetId;
await requireCanModifyDataset(datasetId, ctx);
return await prisma.datasetEntry.delete({
where: {
id: input.id,
},
});
}),
update: protectedProcedure
.input(
z.object({
id: z.string(),
updates: z.object({
input: z.string(),
output: z.string().optional(),
}),
}),
)
.mutation(async ({ input, ctx }) => {
const existing = await prisma.datasetEntry.findUnique({
where: {
id: input.id,
},
});
if (!existing) {
throw new Error(`datasetEntry with id ${input.id} does not exist`);
}
await requireCanModifyDataset(existing.datasetId, ctx);
return await prisma.datasetEntry.update({
where: {
id: input.id,
},
data: {
input: input.updates.input,
output: input.updates.output,
},
});
}),
});
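With PAGE_SIZE = 10, the offset math in the list query above works out as follows (a worked example, not repo code):

const PAGE_SIZE = 10;
const page = 3;   // pages are 1-indexed
const count = 42; // total entries in the dataset

const skip = (page - 1) * PAGE_SIZE;           // 20 rows skipped by Prisma
const startIndex = (page - 1) * PAGE_SIZE + 1; // row 21 is the first shown
const lastPage = Math.ceil(count / PAGE_SIZE); // 5 pages total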

View File

@@ -0,0 +1,91 @@
import { z } from "zod";
import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
import { prisma } from "~/server/db";
import {
requireCanModifyDataset,
requireCanViewDataset,
requireNothing,
} from "~/utils/accessControl";
import userOrg from "~/server/utils/userOrg";
export const datasetsRouter = createTRPCRouter({
list: protectedProcedure.query(async ({ ctx }) => {
// Anyone can list datasets; results are scoped to the user's organization below
requireNothing(ctx);
const datasets = await prisma.dataset.findMany({
where: {
organization: {
organizationUsers: {
some: { userId: ctx.session.user.id },
},
},
},
orderBy: {
createdAt: "desc",
},
include: {
_count: {
select: { datasetEntries: true },
},
},
});
return datasets;
}),
get: publicProcedure.input(z.object({ id: z.string() })).query(async ({ input, ctx }) => {
await requireCanViewDataset(input.id, ctx);
return await prisma.dataset.findFirstOrThrow({
where: { id: input.id },
});
}),
create: protectedProcedure.input(z.object({})).mutation(async ({ ctx }) => {
// Anyone can create a dataset
requireNothing(ctx);
const numDatasets = await prisma.dataset.count({
where: {
organization: {
organizationUsers: {
some: { userId: ctx.session.user.id },
},
},
},
});
return await prisma.dataset.create({
data: {
name: `Dataset ${numDatasets + 1}`,
organizationId: (await userOrg(ctx.session.user.id)).id,
},
});
}),
update: protectedProcedure
.input(z.object({ id: z.string(), updates: z.object({ name: z.string() }) }))
.mutation(async ({ input, ctx }) => {
await requireCanModifyDataset(input.id, ctx);
return await prisma.dataset.update({
where: {
id: input.id,
},
data: {
name: input.updates.name,
},
});
}),
delete: protectedProcedure
.input(z.object({ id: z.string() }))
.mutation(async ({ input, ctx }) => {
await requireCanModifyDataset(input.id, ctx);
await prisma.dataset.delete({
where: {
id: input.id,
},
});
}),
});

View File

@@ -13,6 +13,7 @@ import {
} from "~/utils/accessControl";
import userOrg from "~/server/utils/userOrg";
import generateTypes from "~/modelProviders/generateTypes";
import { promptConstructorVersion } from "~/promptConstructor/version";
export const experimentsRouter = createTRPCRouter({
stats: publicProcedure.input(z.object({ id: z.string() })).query(async ({ input, ctx }) => {
@@ -293,12 +294,15 @@ export const experimentsRouter = createTRPCRouter({
// Anyone can create an experiment
requireNothing(ctx);
const organizationId = (await userOrg(ctx.session.user.id)).id;
const maxSortIndex =
(
await prisma.experiment.aggregate({
_max: {
sortIndex: true,
},
where: { organizationId },
})
)._max?.sortIndex ?? 0;
@@ -306,7 +310,7 @@ export const experimentsRouter = createTRPCRouter({
data: {
sortIndex: maxSortIndex + 1,
label: `Experiment ${maxSortIndex + 1}`,
organizationId: (await userOrg(ctx.session.user.id)).id,
organizationId,
},
});
@@ -318,7 +322,7 @@ export const experimentsRouter = createTRPCRouter({
sortIndex: 0,
// The interpolated $ is necessary until dedent incorporates
// https://github.com/dmnd/dedent/pull/46
constructFn: dedent`
promptConstructor: dedent`
/**
* Use Javascript to define an OpenAI chat completion
* (https://platform.openai.com/docs/api-reference/chat/create).
@@ -339,7 +343,7 @@ export const experimentsRouter = createTRPCRouter({
});`,
model: "gpt-3.5-turbo-0613",
modelProvider: "openai/ChatCompletion",
constructFnVersion: 2,
promptConstructorVersion,
},
}),
prisma.templateVariable.create({

View File

@@ -9,9 +9,10 @@ import { reorderPromptVariants } from "~/server/utils/reorderPromptVariants";
import { type PromptVariant } from "@prisma/client";
import { deriveNewConstructFn } from "~/server/utils/deriveNewContructFn";
import { requireCanModifyExperiment, requireCanViewExperiment } from "~/utils/accessControl";
import parseConstructFn from "~/server/utils/parseConstructFn";
import modelProviders from "~/modelProviders/modelProviders";
import { ZodSupportedProvider } from "~/modelProviders/types";
import parsePromptConstructor from "~/promptConstructor/parse";
import { promptConstructorVersion } from "~/promptConstructor/version";
export const promptVariantsRouter = createTRPCRouter({
list: publicProcedure
@@ -199,8 +200,9 @@ export const promptVariantsRouter = createTRPCRouter({
experimentId: input.experimentId,
label: newVariantLabel,
sortIndex: (originalVariant?.sortIndex ?? 0) + 1,
constructFn: newConstructFn,
constructFnVersion: 2,
promptConstructor: newConstructFn,
promptConstructorVersion:
originalVariant?.promptConstructorVersion ?? promptConstructorVersion,
model: originalVariant?.model ?? "gpt-3.5-turbo",
modelProvider: originalVariant?.modelProvider ?? "openai/ChatCompletion",
},
@@ -310,7 +312,7 @@ export const promptVariantsRouter = createTRPCRouter({
});
await requireCanModifyExperiment(existing.experimentId, ctx);
const constructedPrompt = await parseConstructFn(existing.constructFn);
const constructedPrompt = await parsePromptConstructor(existing.promptConstructor);
if ("error" in constructedPrompt) {
return userError(constructedPrompt.error);
@@ -332,7 +334,7 @@ export const promptVariantsRouter = createTRPCRouter({
.input(
z.object({
id: z.string(),
constructFn: z.string(),
promptConstructor: z.string(),
streamScenarios: z.array(z.string()),
}),
)
@@ -348,7 +350,7 @@ export const promptVariantsRouter = createTRPCRouter({
throw new Error(`Prompt Variant with id ${input.id} does not exist`);
}
const parsedPrompt = await parseConstructFn(input.constructFn);
const parsedPrompt = await parsePromptConstructor(input.promptConstructor);
if ("error" in parsedPrompt) {
return userError(parsedPrompt.error);
@@ -361,8 +363,8 @@ export const promptVariantsRouter = createTRPCRouter({
label: existing.label,
sortIndex: existing.sortIndex,
uiId: existing.uiId,
constructFn: input.constructFn,
constructFnVersion: 2,
promptConstructor: input.promptConstructor,
promptConstructorVersion: existing.promptConstructorVersion,
modelProvider: parsedPrompt.modelProvider,
model: parsedPrompt.model,
},

View File

@@ -1,7 +1,7 @@
import { z } from "zod";
import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
import { prisma } from "~/server/db";
import { autogenerateScenarioValues } from "../autogen";
import { autogenerateScenarioValues } from "../autogenerate/autogenerateScenarioValues";
import { recordExperimentUpdated } from "~/server/utils/recordExperimentUpdated";
import { runAllEvals } from "~/server/utils/evaluations";
import { generateNewCell } from "~/server/utils/generateNewCell";

View File

@@ -3,7 +3,7 @@ import { prisma } from "~/server/db";
import { requireNothing } from "~/utils/accessControl";
export const worldChampsRouter = createTRPCRouter({
userStatus: publicProcedure.query(async ({ input, ctx }) => {
userStatus: publicProcedure.query(async ({ ctx }) => {
const userId = ctx.session?.user.id;
if (!userId) {

View File

@@ -14,6 +14,7 @@ import superjson from "superjson";
import { ZodError } from "zod";
import { getServerAuthSession } from "~/server/auth";
import { prisma } from "~/server/db";
import { capturePath } from "~/utils/analytics/serverAnalytics";
/**
* 1. CONTEXT
@@ -112,7 +113,7 @@ export const createTRPCRouter = t.router;
export const publicProcedure = t.procedure;
/** Reusable middleware that enforces users are logged in before running the procedure. */
const enforceUserIsAuthed = t.middleware(async ({ ctx, next }) => {
const enforceUserIsAuthed = t.middleware(async ({ ctx, next, path }) => {
if (!ctx.session || !ctx.session.user) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
@@ -134,6 +135,8 @@ const enforceUserIsAuthed = t.middleware(async ({ ctx, next }) => {
"Protected routes must perform access control checks then explicitly invoke the `ctx.markAccessControlRun()` function to ensure we don't forget access control on a route.",
});
capturePath(ctx.session, path);
return resp;
});

View File

@@ -1,58 +0,0 @@
import * as recast from "recast";
import { type ASTNode } from "ast-types";
import { prisma } from "../db";
import { fileURLToPath } from "url";
const { builders: b } = recast.types;
export const migrate1to2 = (fnBody: string): string => {
const ast: ASTNode = recast.parse(fnBody);
recast.visit(ast, {
visitAssignmentExpression(path) {
const node = path.node;
if ("name" in node.left && node.left.name === "prompt") {
const functionCall = b.callExpression(b.identifier("definePrompt"), [
b.literal("openai/ChatCompletion"),
node.right,
]);
path.replace(functionCall);
}
return false;
},
});
return recast.print(ast).code;
};
export default async function migrateConstructFns() {
const v1Prompts = await prisma.promptVariant.findMany({
where: {
constructFnVersion: 1,
},
});
console.log(`Migrating ${v1Prompts.length} prompts 1->2`);
await Promise.all(
v1Prompts.map(async (variant) => {
try {
await prisma.promptVariant.update({
where: {
id: variant.id,
},
data: {
constructFn: migrate1to2(variant.constructFn),
constructFnVersion: 2,
},
});
} catch (e) {
console.error("Error migrating constructFn for variant", variant.id, e);
}
}),
);
}
// If we're running this file directly, run the migration
if (process.argv.at(-1) === fileURLToPath(import.meta.url)) {
console.log("Running migration");
await migrateConstructFns();
console.log("Done");
}

View File

@@ -5,8 +5,8 @@ import { prisma } from "~/server/db";
import { wsConnection } from "~/utils/wsConnection";
import { runEvalsForOutput } from "../utils/evaluations";
import hashPrompt from "../utils/hashPrompt";
import parseConstructFn from "../utils/parseConstructFn";
import defineTask from "./defineTask";
import parsePromptConstructor from "~/promptConstructor/parse";
export type QueryModelJob = {
cellId: string;
@@ -75,7 +75,10 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
return;
}
const prompt = await parseConstructFn(variant.constructFn, scenario.variableValues as JsonObject);
const prompt = await parsePromptConstructor(
variant.promptConstructor,
scenario.variableValues as JsonObject,
);
if ("error" in prompt) {
await prisma.scenarioVariantCell.update({

View File

@@ -3,8 +3,8 @@ import ivm from "isolated-vm";
import dedent from "dedent";
import { openai } from "./openai";
import { isObject } from "lodash-es";
import { type CompletionCreateParams } from "openai/resources/chat/completions";
import formatPromptConstructor from "~/utils/formatPromptConstructor";
import type { CreateChatCompletionRequestMessage } from "openai/resources/chat/completions";
import formatPromptConstructor from "~/promptConstructor/format";
import { type SupportedProvider, type Model } from "~/modelProviders/types";
import modelProviders from "~/modelProviders/modelProviders";
@@ -16,7 +16,7 @@ export async function deriveNewConstructFn(
instructions?: string,
) {
if (originalVariant && !newModel && !instructions) {
return originalVariant.constructFn;
return originalVariant.promptConstructor;
}
if (originalVariant && (newModel || instructions)) {
return await requestUpdatedPromptFunction(originalVariant, newModel, instructions);
@@ -44,7 +44,7 @@ const requestUpdatedPromptFunction = async (
let newContructionFn = "";
for (let i = 0; i < NUM_RETRIES; i++) {
try {
const messages: CompletionCreateParams.CreateChatCompletionRequestNonStreaming.Message[] = [
const messages: CreateChatCompletionRequestMessage[] = [
{
role: "system",
content: `Your job is to update prompt constructor functions. Here is the api shape for the current model:\n---\n${JSON.stringify(
@@ -55,7 +55,7 @@ const requestUpdatedPromptFunction = async (
},
{
role: "user",
content: `This is the current prompt constructor function:\n---\n${originalVariant.constructFn}`,
content: `This is the current prompt constructor function:\n---\n${originalVariant.promptConstructor}`,
},
];
if (newModel) {
@@ -66,9 +66,11 @@ const requestUpdatedPromptFunction = async (
if (newModel.provider !== originalModel.provider) {
messages.push({
role: "user",
content: `The old provider was ${originalModel.provider}. The new provider is ${
content: `As seen in the first argument to definePrompt, the old provider endpoint was "${
originalModel.provider
}". The new provider endpoint is "${
newModel.provider
}. Here is the schema for the new model:\n---\n${JSON.stringify(
}". Here is the schema for the new model:\n---\n${JSON.stringify(
modelProviders[newModel.provider].inputSchema,
null,
2,

View File

@@ -1,10 +1,10 @@
import { Prisma } from "@prisma/client";
import { prisma } from "../db";
import parseConstructFn from "./parseConstructFn";
import { type JsonObject } from "type-fest";
import hashPrompt from "./hashPrompt";
import { omit } from "lodash-es";
import { queueQueryModel } from "../tasks/queryModel.task";
import parsePromptConstructor from "~/promptConstructor/parse";
export const generateNewCell = async (
variantId: string,
@@ -41,8 +41,8 @@ export const generateNewCell = async (
if (cell) return;
const parsedConstructFn = await parseConstructFn(
variant.constructFn,
const parsedConstructFn = await parsePromptConstructor(
variant.promptConstructor,
scenario.variableValues as JsonObject,
);

View File

@@ -1,6 +1,6 @@
import crypto from "crypto";
import { type JsonValue } from "type-fest";
import { type ParsedConstructFn } from "./parseConstructFn";
import { ParsedPromptConstructor } from "~/promptConstructor/parse";
function sortKeys(obj: JsonValue): JsonValue {
if (typeof obj !== "object" || obj === null) {
@@ -25,7 +25,7 @@ function sortKeys(obj: JsonValue): JsonValue {
return sortedObj;
}
export default function hashPrompt(prompt: ParsedConstructFn<any>): string {
export default function hashPrompt(prompt: ParsedPromptConstructor<any>): string {
// Sort object keys recursively
const sortedObj = sortKeys(prompt as unknown as JsonValue);
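sortKeys makes the hash insensitive to key order, so { a: 1, b: 2 } and { b: 2, a: 1 } serialize identically before hashing. A self-contained sketch of the whole pipeline, assuming the digest step (not shown in this hunk) is a SHA-256 of the sorted JSON:

import crypto from "crypto";

const stableHash = (value: unknown): string => {
  // Recursively sort object keys; array element order is preserved.
  const sort = (v: unknown): unknown =>
    Array.isArray(v)
      ? v.map(sort)
      : v !== null && typeof v === "object"
        ? Object.fromEntries(
            Object.keys(v as Record<string, unknown>)
              .sort()
              .map((k) => [k, sort((v as Record<string, unknown>)[k])]),
          )
        : v;
  return crypto.createHash("sha256").update(JSON.stringify(sort(value))).digest("hex");
};

// stableHash({ a: 1, b: 2 }) === stableHash({ b: 2, a: 1 })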

View File

@@ -1,7 +1,7 @@
import { type RouterOutputs } from "~/utils/api";
import { type SliceCreator } from "./store";
import loader from "@monaco-editor/loader";
import formatPromptConstructor from "~/utils/formatPromptConstructor";
import formatPromptConstructor from "~/promptConstructor/format";
export const editorBackground = "#fafafa";

View File

@@ -1,11 +1,31 @@
import { extendTheme } from "@chakra-ui/react";
import "@fontsource/inconsolata";
import { ChakraProvider } from "@chakra-ui/react";
import { modalAnatomy } from "@chakra-ui/anatomy";
import { createMultiStyleConfigHelpers } from "@chakra-ui/styled-system";
const systemFont =
'ui-sans-serif, -apple-system, "system-ui", "Segoe UI", Helvetica, "Apple Color Emoji", Arial, sans-serif, "Segoe UI Emoji", "Segoe UI Symbol"';
/* eslint-disable @typescript-eslint/unbound-method */
const { definePartsStyle, defineMultiStyleConfig } = createMultiStyleConfigHelpers(
modalAnatomy.keys,
);
const modalTheme = defineMultiStyleConfig({
baseStyle: definePartsStyle({
dialog: { borderRadius: "sm" },
}),
});
const theme = extendTheme({
styles: {
global: (props: { colorMode: "dark" | "light" }) => ({
"html, body": {
backgroundColor: props.colorMode === "dark" ? "gray.900" : "white",
},
}),
},
fonts: {
heading: systemFont,
body: systemFont,
@@ -32,6 +52,7 @@ const theme = extendTheme({
},
},
},
Modal: modalTheme,
},
});

View File

@@ -3,11 +3,46 @@ import { TRPCError } from "@trpc/server";
 import { type TRPCContext } from "~/server/api/trpc";
 import { prisma } from "~/server/db";
+const isAdmin = async (userId: string) => {
+  const user = await prisma.user.findFirst({
+    where: { id: userId, role: "ADMIN" },
+  });
+  return !!user;
+};
 // No-op method for protected routes that really should be accessible to anyone.
 export const requireNothing = (ctx: TRPCContext) => {
   ctx.markAccessControlRun();
 };
+export const requireCanViewDataset = async (datasetId: string, ctx: TRPCContext) => {
+  const dataset = await prisma.dataset.findFirst({
+    where: {
+      id: datasetId,
+      organization: {
+        organizationUsers: {
+          some: {
+            role: { in: [OrganizationUserRole.ADMIN, OrganizationUserRole.MEMBER] },
+            userId: ctx.session?.user.id,
+          },
+        },
+      },
+    },
+  });
+  if (!dataset) {
+    throw new TRPCError({ code: "UNAUTHORIZED" });
+  }
+  ctx.markAccessControlRun();
+};
+export const requireCanModifyDataset = async (datasetId: string, ctx: TRPCContext) => {
+  // Right now all users who can view a dataset can also modify it
+  await requireCanViewDataset(datasetId, ctx);
+};
 export const requireCanViewExperiment = async (experimentId: string, ctx: TRPCContext) => {
   await prisma.experiment.findFirst({
     where: { id: experimentId },
@@ -18,21 +53,24 @@ export const requireCanViewExperiment = async (experimentId: string, ctx: TRPCCo
 };
 export const canModifyExperiment = async (experimentId: string, userId: string) => {
-  const experiment = await prisma.experiment.findFirst({
-    where: {
-      id: experimentId,
-      organization: {
-        organizationUsers: {
-          some: {
-            role: { in: [OrganizationUserRole.ADMIN, OrganizationUserRole.MEMBER] },
-            userId,
-          },
-        },
-      },
-    },
-  });
-  return !!experiment;
+  const [adminUser, experiment] = await Promise.all([
+    isAdmin(userId),
+    prisma.experiment.findFirst({
+      where: {
+        id: experimentId,
+        organization: {
+          organizationUsers: {
+            some: {
+              role: { in: [OrganizationUserRole.ADMIN, OrganizationUserRole.MEMBER] },
+              userId,
+            },
+          },
+        },
+      },
+    }),
+  ]);
+  return adminUser || !!experiment;
 };
 export const requireCanModifyExperiment = async (experimentId: string, ctx: TRPCContext) => {

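These guards look meant to run at the top of tRPC procedures, with ctx.markAccessControlRun() letting shared middleware verify that some check executed. A hedged sketch of a call site; the router, procedure, and import paths below are illustrative, not from this diff:

import { z } from "zod";
import { createTRPCRouter, publicProcedure } from "~/server/api/trpc"; // assumed T3-style exports
import { prisma } from "~/server/db";
import { requireCanModifyDataset } from "~/utils/accessControl"; // assumed module path

export const datasetsRouter = createTRPCRouter({
  rename: publicProcedure
    .input(z.object({ id: z.string(), name: z.string() }))
    .mutation(async ({ input, ctx }) => {
      // Throws UNAUTHORIZED unless the caller belongs to the dataset's organization.
      await requireCanModifyDataset(input.id, ctx);
      return prisma.dataset.update({ where: { id: input.id }, data: { name: input.name } });
    }),
});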
View File

@@ -1,13 +0,0 @@
-// Make sure we're in the browser
-import posthog from "posthog-js";
-import { env } from "~/env.mjs";
-const enableAnalytics = typeof window !== "undefined";
-if (enableAnalytics) {
-  if (env.NEXT_PUBLIC_POSTHOG_KEY) {
-    posthog.init(env.NEXT_PUBLIC_POSTHOG_KEY, {
-      api_host: "https://app.posthog.com",
-    });
-  }
-}

View File

@@ -0,0 +1,31 @@
+import { type Session } from "next-auth";
+import { useSession } from "next-auth/react";
+import { useEffect } from "react";
+import posthog from "posthog-js";
+import { env } from "~/env.mjs";
+// Make sure we're in the browser
+const enableBrowserAnalytics = typeof window !== "undefined";
+if (env.NEXT_PUBLIC_POSTHOG_KEY && enableBrowserAnalytics) {
+  posthog.init(env.NEXT_PUBLIC_POSTHOG_KEY, {
+    api_host: `${env.NEXT_PUBLIC_HOST}/ingest`,
+  });
+}
+export const identifySession = (session: Session) => {
+  if (!session.user) return;
+  posthog.identify(session.user.id, {
+    name: session.user.name,
+    email: session.user.email,
+  });
+};
+export const SessionIdentifier = () => {
+  const session = useSession().data;
+  useEffect(() => {
+    if (session && enableBrowserAnalytics) identifySession(session);
+  }, [session]);
+  return null;
+};

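SessionIdentifier renders nothing, so it only needs to be mounted once under the session provider; the `${env.NEXT_PUBLIC_HOST}/ingest` api_host also suggests PostHog traffic is proxied through the app's own domain. A hedged sketch of the mounting (the _app file itself isn't part of this diff, and the import path is an assumption):

import { type AppProps } from "next/app";
import { SessionProvider } from "next-auth/react";
import { SessionIdentifier } from "~/utils/analytics"; // assumed module path

export default function MyApp({ Component, pageProps }: AppProps) {
  return (
    <SessionProvider session={pageProps.session}>
      {/* Ties the PostHog distinct id to the signed-in user on every page. */}
      <SessionIdentifier />
      <Component {...pageProps} />
    </SessionProvider>
  );
}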
View File

@@ -0,0 +1,14 @@
+import { type Session } from "next-auth";
+import { PostHog } from "posthog-node";
+import { env } from "~/env.mjs";
+export const posthogServerClient = env.NEXT_PUBLIC_POSTHOG_KEY
+  ? new PostHog(env.NEXT_PUBLIC_POSTHOG_KEY, {
+      host: "https://app.posthog.com",
+    })
+  : null;
+export const capturePath = (session: Session, path: string) => {
+  if (!session.user || !posthogServerClient) return;
+  posthogServerClient?.capture({ distinctId: session.user.id, event: path });
+};

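capturePath records one event per server-handled path, keyed by the session user. Because posthog-node batches events in memory, short-lived processes should flush before exiting. A hedged sketch of a caller; the wrapper and import path below are illustrative, since the diff doesn't show where capturePath is invoked:

import { type Session } from "next-auth";
import { capturePath, posthogServerClient } from "~/utils/analytics/serverAnalytics"; // assumed path

export async function trackServerPath(session: Session | null, path: string) {
  if (!session) return;
  capturePath(session, path);
  // shutdown() flushes posthog-node's queued events; useful in serverless handlers.
  await posthogServerClient?.shutdown();
}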
View File

@@ -17,6 +17,26 @@ export const useExperimentAccess = () => {
   return useExperiment().data?.access ?? { canView: false, canModify: false };
 };
+export const useDataset = () => {
+  const router = useRouter();
+  const dataset = api.datasets.get.useQuery(
+    { id: router.query.id as string },
+    { enabled: !!router.query.id },
+  );
+  return dataset;
+};
+export const useDatasetEntries = () => {
+  const dataset = useDataset();
+  const [page] = usePage();
+  return api.datasetEntries.list.useQuery(
+    { datasetId: dataset.data?.id ?? "", page },
+    { enabled: dataset.data?.id != null },
+  );
+};
 type AsyncFunction<T extends unknown[], U> = (...args: T) => Promise<U>;
 export function useHandledAsyncCallback<T extends unknown[], U>(

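A hedged sketch of consuming the new hooks in a page component; the component, field names, and import path are illustrative, and the list query's return shape isn't shown in this diff:

import { useDataset, useDatasetEntries } from "~/utils/hooks"; // assumed module path

export function DatasetEntriesList() {
  // Both hooks derive the dataset id from the route, so no ids are threaded in.
  const dataset = useDataset();
  const entries = useDatasetEntries();

  if (!dataset.data || !entries.data) return null; // no id in the route yet, or still loading

  return (
    <div>
      <h1>{dataset.data.name /* field name assumed */}</h1>
      {/* entries.data shape depends on the datasetEntries.list router output; render rows here */}
    </div>
  );
}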
Some files were not shown because too many files have changed in this diff