Compare commits


3 Commits

Author SHA1 Message Date
David Corbitt 03a8d094fc Add header to scenario modal 2023-08-18 00:07:48 -07:00
David Corbitt 2b990622f5 Hide expand button for empty scenario editor 2023-08-18 00:06:05 -07:00
David Corbitt d079eba557 Allow user to delete scenario without variables 2023-08-18 00:02:26 -07:00
168 changed files with 2104 additions and 6960 deletions

View File

@@ -1,14 +0,0 @@
name: Sweep Fast Issue
title: 'Sweep (fast): '
description: For few-line fixes to be handled by Sweep, an AI-powered junior developer. Sweep will use GPT-3.5 to quickly create a PR for very small changes.
labels: sweep
body:
- type: textarea
id: description
attributes:
label: Details
description: Tell Sweep where and what to edit and provide enough context for a new developer to the codebase
placeholder: |
Bugs: The bug might be in ... file. Here are the logs: ...
Features: the new endpoint should use the ... class from ... file because it contains ... logic.
Refactors: We are migrating this function to ... version because ...

View File

@@ -1,14 +0,0 @@
name: Sweep Slow Issue
title: 'Sweep (slow): '
description: For larger bugs, features, refactors, and tests to be handled by Sweep, an AI-powered junior developer. Sweep will perform a deeper search and more self-reviews but will take longer.
labels: sweep
body:
- type: textarea
id: description
attributes:
label: Details
description: Tell Sweep where and what to edit and provide enough context for a new developer to the codebase
placeholder: |
Bugs: The bug might be in ... file. Here are the logs: ...
Features: the new endpoint should use the ... class from ... file because it contains ... logic.
Refactors: We are migrating this function to ... version because ...

View File

@@ -1,14 +0,0 @@
name: Sweep Issue
title: 'Sweep: '
description: For small bugs, features, refactors, and tests to be handled by Sweep, an AI-powered junior developer.
labels: sweep
body:
- type: textarea
id: description
attributes:
label: Details
description: Tell Sweep where and what to edit and provide enough context for a new developer to the codebase
placeholder: |
Bugs: The bug might be in ... file. Here are the logs: ...
Features: the new endpoint should use the ... class from ... file because it contains ... logic.
Refactors: We are migrating this function to ... version because ...

1
.gitignore vendored
View File

@@ -3,4 +3,3 @@
*.pyc
node_modules/
*.tsbuildinfo
dist/

105
README.md
View File

@@ -1,53 +1,16 @@
<p align="center">
<a href="https://openpipe.ai">
<img height="70" src="https://github.com/openpipe/openpipe/assets/41524992/70af25fb-1f90-42d9-8a20-3606e3b5aaba" alt="logo">
</a>
</p>
<h1 align="center">
OpenPipe
</h1>
<!-- <img src="https://github.com/openpipe/openpipe/assets/41524992/ca59596e-eb80-40f9-921f-6d67f6e6d8fa" width="72px" /> -->
<p align="center">
<i>Turn expensive prompts into cheap fine-tuned models.</i>
</p>
# OpenPipe
<p align="center">
<a href="/LICENSE"><img alt="License Apache-2.0" src="https://img.shields.io/github/license/openpipe/openpipe?style=flat-square"></a>
<a href='http://makeapullrequest.com'><img alt='PRs Welcome' src='https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square'/></a>
<a href="https://github.com/openpipe/openpipe/graphs/commit-activity"><img alt="GitHub commit activity" src="https://img.shields.io/github/commit-activity/m/openpipe/openpipe?style=flat-square"/></a>
<a href="https://github.com/openpipe/openpipe/issues"><img alt="GitHub closed issues" src="https://img.shields.io/github/issues-closed/openpipe/openpipe?style=flat-square"/></a>
<img src="https://img.shields.io/badge/Y%20Combinator-S23-orange?style=flat-square" alt="Y Combinator S23">
</p>
OpenPipe is a flexible playground for comparing and optimizing LLM prompts. It lets you quickly generate, test and compare candidate prompts, and can automatically [translate](#-translate-between-model-apis) those prompts between models.
<p align="center">
<a href="https://app.openpipe.ai/">Hosted App</a> - <a href="#running-locally">Running Locally</a> - <a href="#sample-experiments">Experiments</a>
</p>
<br>
Use powerful but expensive LLMs to fine-tune smaller and cheaper models suited to your exact needs. Evaluate model and prompt combinations in the playground. Query your past requests and export optimized training data. Try it out at https://app.openpipe.ai or <a href="#running-locally">run it locally</a>.
<br>
## Features
* <b>Experiment</b>
* Bulk-test wide-reaching scenarios using code templating.
* Seamlessly translate prompts across different model APIs.
* Tap into autogenerated scenarios for fresh test perspectives.
* <b>Fine-Tune (Beta)</b>
* Easy integration with OpenPipe's SDK in both Python and JS.
* Swiftly query logs using intuitive built-in filters.
* Export data in multiple training formats, including Alpaca and ChatGPT, with deduplication.
<img src="https://github.com/openpipe/openpipe/assets/41524992/eaa8b92d-4536-4f63-bbef-4b0b1a60f6b5" alt="fine-tune demo">
<!-- <img height="400px" src="https://github.com/openpipe/openpipe/assets/41524992/66bb1843-cb72-4130-a369-eec2df3b8201" alt="playground demo"> -->
<img src="https://github.com/openpipe/openpipe/assets/41524992/219a844e-3f4e-4f6b-8066-41348b42977b" alt="demo">
You can use our hosted version of OpenPipe at https://openpipe.ai. You can also clone this repository and [run it locally](#running-locally).
## Sample Experiments
These are sample experiments users have created that show how OpenPipe works. Feel free to fork them and start experimenting yourself.
These are simple experiments users have created that show how OpenPipe works. Feel free to fork them and start experimenting yourself.
- [Twitter Sentiment Analysis](https://app.openpipe.ai/experiments/62c20a73-2012-4a64-973c-4b665ad46a57)
- [Reddit User Needs](https://app.openpipe.ai/experiments/22222222-2222-2222-2222-222222222222)
@@ -56,25 +19,43 @@ These are sample experiments users have created that show how OpenPipe works. Fe
## Supported Models
#### OpenAI
- [GPT 3.5 Turbo](https://platform.openai.com/docs/guides/gpt/chat-completions-api)
- [GPT 3.5 Turbo 16k](https://platform.openai.com/docs/guides/gpt/chat-completions-api)
- [GPT 4](https://openai.com/gpt-4)
#### Llama2
- [7b chat](https://replicate.com/a16z-infra/llama7b-v2-chat)
- [13b chat](https://replicate.com/a16z-infra/llama13b-v2-chat)
- [70b chat](https://replicate.com/replicate/llama70b-v2-chat)
#### Llama2 Fine-Tunes
- [Open-Orca/OpenOrcaxOpenChat-Preview2-13B](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B)
- [Open-Orca/OpenOrca-Platypus2-13B](https://huggingface.co/Open-Orca/OpenOrca-Platypus2-13B)
- [NousResearch/Nous-Hermes-Llama2-13b](https://huggingface.co/NousResearch/Nous-Hermes-Llama2-13b)
- [jondurbin/airoboros-l2-13b-gpt4-2.0](https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-2.0)
- [lmsys/vicuna-13b-v1.5](https://huggingface.co/lmsys/vicuna-13b-v1.5)
- [Gryphe/MythoMax-L2-13b](https://huggingface.co/Gryphe/MythoMax-L2-13b)
- [NousResearch/Nous-Hermes-llama-2-7b](https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b)
#### Anthropic
- [Claude 1 Instant](https://www.anthropic.com/index/introducing-claude)
- [Claude 2](https://www.anthropic.com/index/claude-2)
- All models available through the OpenAI [chat completion API](https://platform.openai.com/docs/guides/gpt/chat-completions-api)
- Llama2 [7b chat](https://replicate.com/a16z-infra/llama7b-v2-chat), [13b chat](https://replicate.com/a16z-infra/llama13b-v2-chat), [70b chat](https://replicate.com/replicate/llama70b-v2-chat).
- Anthropic's [Claude 1 Instant](https://www.anthropic.com/index/introducing-claude) and [Claude 2](https://www.anthropic.com/index/claude-2)
## Features
### 🔍 Visualize Responses
Inspect prompt completions side-by-side.
### 🧪 Bulk-Test
OpenPipe lets you _template_ a prompt. Use the templating feature to run the prompts you're testing against many potential inputs for broad coverage of your problem space.
### 📟 Translate between Model APIs
Write your prompt in one format and automatically convert it to work with any other model.
<img width="480" alt="Screenshot 2023-08-01 at 11 55 38 PM" src="https://github.com/OpenPipe/OpenPipe/assets/41524992/1e19ccf2-96b6-4e93-a3a5-1449710d1b5b" alt="translate between models">
<br><br>
### 🛠️ Refine Your Prompts Automatically
Use a growing database of best-practice refinements to improve your prompts automatically.
<img width="480" alt="Screenshot 2023-08-01 at 11 55 38 PM" src="https://github.com/OpenPipe/OpenPipe/assets/41524992/87a27fe7-daef-445c-a5e2-1c82b23f9f99" alt="add function call">
<br><br>
### 🪄 Auto-generate Test Scenarios
OpenPipe includes a tool to generate new test scenarios based on your existing prompts and scenarios. Just click "Autogenerate Scenario" to try it out!
<img width="600" src="https://github.com/openpipe/openpipe/assets/41524992/219a844e-3f4e-4f6b-8066-41348b42977b" alt="auto-generate">
<br><br>
## Running Locally
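
As an aside on the Bulk-Test and model-translation features described above: a prompt variant is defined by a promptConstructor function, the same field that appears throughout the application diffs below. The following is a minimal sketch of what such a constructor might look like; the definePrompt helper and the scenario object are assumed names used for illustration, not an API confirmed by this changeset.

// Sketch of a templated prompt constructor for an experiment variant.
// `definePrompt` and `scenario` are assumptions; see the note above.
declare function definePrompt(provider: string, input: unknown): void;
declare const scenario: Record<string, string>;

definePrompt("openai/ChatCompletion", {
  model: "gpt-3.5-turbo-0613",
  messages: [
    {
      role: "user",
      // Scenario variables are filled in per row, so one variant can be
      // bulk-tested against many scenarios.
      content: `Classify the sentiment of the following tweet:\n\n${scenario.tweet}`,
    },
  ],
});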

View File

@@ -19,9 +19,10 @@ declare module "nextjs-routes" {
| DynamicRoute<"/api/v1/[...trpc]", { "trpc": string[] }>
| StaticRoute<"/api/v1/openapi">
| StaticRoute<"/dashboard">
| DynamicRoute<"/data/[id]", { "id": string }>
| StaticRoute<"/data">
| DynamicRoute<"/experiments/[experimentSlug]", { "experimentSlug": string }>
| StaticRoute<"/experiments">
| StaticRoute<"/fine-tunes">
| StaticRoute<"/">
| DynamicRoute<"/invitations/[invitationToken]", { "invitationToken": string }>
| StaticRoute<"/project/settings">

View File

@@ -23,6 +23,7 @@ ARG NEXT_PUBLIC_SOCKET_URL
ARG NEXT_PUBLIC_HOST
ARG NEXT_PUBLIC_SENTRY_DSN
ARG SENTRY_AUTH_TOKEN
ARG NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS
WORKDIR /code
COPY --from=deps /code/node_modules ./node_modules
@@ -44,4 +45,4 @@ EXPOSE 3000
ENV PORT 3000
# Run the "run-prod.sh" script
CMD /code/app/scripts/run-prod.sh
CMD /code/app/run-prod.sh
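
The new NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS build argument looks like a client-side feature flag. Below is a rough sketch of how such a flag is typically declared in the app's zod-based env schema and used to gate UI; env.mjs is not part of this diff, so the exact schema entry and usage are assumptions.

// Hypothetical env schema entry for the new feature flag (env.mjs pattern).
import { z } from "zod";

export const clientEnvSchema = z.object({
  NEXT_PUBLIC_SENTRY_DSN: z.string().optional(),
  // Any non-empty value turns the logged-calls UI on.
  NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS: z.string().optional(),
});

// Elsewhere, a component can gate rendering on the flag:
// if (env.NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS) { /* show the Request Logs nav item */ }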

View File

@@ -12,8 +12,8 @@
"build": "next build",
"dev:next": "TZ=UTC next dev",
"dev:wss": "pnpm tsx --watch src/wss-server.ts",
"worker": "NODE_ENV='development' pnpm tsx --watch src/server/tasks/worker.ts",
"dev": "concurrently --kill-others 'pnpm dev:next' 'pnpm dev:wss' 'pnpm worker --watch'",
"dev:worker": "NODE_ENV='development' pnpm tsx --watch src/server/tasks/worker.ts",
"dev": "concurrently --kill-others 'pnpm dev:next' 'pnpm dev:wss' 'pnpm dev:worker'",
"postinstall": "prisma generate",
"lint": "next lint",
"start": "TZ=UTC next start",
@@ -48,7 +48,6 @@
"@trpc/react-query": "^10.26.0",
"@trpc/server": "^10.26.0",
"@vercel/og": "^0.5.9",
"archiver": "^6.0.0",
"ast-types": "^0.14.2",
"chroma-js": "^2.4.2",
"concurrently": "^8.2.0",
@@ -61,7 +60,6 @@
"framer-motion": "^10.12.17",
"gpt-tokens": "^1.0.10",
"graphile-worker": "^0.13.0",
"human-id": "^4.0.0",
"immer": "^10.0.2",
"isolated-vm": "^4.5.0",
"json-schema-to-typescript": "^13.0.2",
@@ -79,8 +77,7 @@
"nextjs-routes": "^2.0.1",
"nodemailer": "^6.9.4",
"openai": "4.0.0-beta.7",
"openpipe": "^0.3.0",
"openpipe-dev": "workspace:^",
"openpipe": "workspace:*",
"pg": "^8.11.2",
"pluralize": "^8.0.0",
"posthog-js": "^1.75.3",
@@ -101,7 +98,6 @@
"replicate": "^0.12.3",
"socket.io": "^4.7.1",
"socket.io-client": "^4.7.1",
"stream-buffers": "^3.0.2",
"superjson": "1.12.2",
"trpc-openapi": "^1.2.0",
"tsx": "^3.12.7",
@@ -114,7 +110,6 @@
},
"devDependencies": {
"@openapi-contrib/openapi-schema-to-json-schema": "^4.0.5",
"@types/archiver": "^5.3.2",
"@types/babel__core": "^7.20.1",
"@types/babel__standalone": "^7.1.4",
"@types/chroma-js": "^2.4.0",
@@ -131,7 +126,6 @@
"@types/react": "^18.2.6",
"@types/react-dom": "^18.2.4",
"@types/react-syntax-highlighter": "^15.5.7",
"@types/stream-buffers": "^3.0.4",
"@types/uuid": "^9.0.2",
"@typescript-eslint/eslint-plugin": "^5.59.6",
"@typescript-eslint/parser": "^5.59.6",

View File

@@ -1,12 +0,0 @@
import { prisma } from "~/server/db";
// delete most recent fineTune
const mostRecentFineTune = await prisma.fineTune.findFirst({
orderBy: { createdAt: "desc" },
});
if (mostRecentFineTune) {
await prisma.fineTune.delete({
where: { id: mostRecentFineTune.id },
});
}

View File

@@ -1,48 +0,0 @@
/*
Warnings:
- You are about to drop the column `input` on the `DatasetEntry` table. All the data in the column will be lost.
- You are about to drop the column `output` on the `DatasetEntry` table. All the data in the column will be lost.
- Added the required column `loggedCallId` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.
*/
-- AlterTable
ALTER TABLE "DatasetEntry" DROP COLUMN "input",
DROP COLUMN "output",
ADD COLUMN "loggedCallId" UUID NOT NULL;
-- AddForeignKey
ALTER TABLE "DatasetEntry" ADD CONSTRAINT "DatasetEntry_loggedCallId_fkey" FOREIGN KEY ("loggedCallId") REFERENCES "LoggedCall"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AlterTable
ALTER TABLE "LoggedCallModelResponse" ALTER COLUMN "cost" SET DATA TYPE DOUBLE PRECISION;
-- CreateEnum
CREATE TYPE "FineTuneStatus" AS ENUM ('PENDING', 'TRAINING', 'AWAITING_DEPLOYMENT', 'DEPLOYING', 'DEPLOYED', 'ERROR');
-- CreateTable
CREATE TABLE "FineTune" (
"id" UUID NOT NULL,
"slug" TEXT NOT NULL,
"baseModel" TEXT NOT NULL,
"status" "FineTuneStatus" NOT NULL DEFAULT 'PENDING',
"trainingStartedAt" TIMESTAMP(3),
"trainingFinishedAt" TIMESTAMP(3),
"deploymentStartedAt" TIMESTAMP(3),
"deploymentFinishedAt" TIMESTAMP(3),
"datasetId" UUID NOT NULL,
"projectId" UUID NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "FineTune_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "FineTune_slug_key" ON "FineTune"("slug");
-- AddForeignKey
ALTER TABLE "FineTune" ADD CONSTRAINT "FineTune_datasetId_fkey" FOREIGN KEY ("datasetId") REFERENCES "Dataset"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "FineTune" ADD CONSTRAINT "FineTune_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -181,7 +181,6 @@ model Dataset {
name String
datasetEntries DatasetEntry[]
fineTunes FineTune[]
projectId String @db.Uuid
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
@@ -193,8 +192,8 @@ model Dataset {
model DatasetEntry {
id String @id @default(uuid()) @db.Uuid
loggedCallId String @db.Uuid
loggedCall LoggedCall @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)
input String
output String?
datasetId String @db.Uuid
dataset Dataset? @relation(fields: [datasetId], references: [id], onDelete: Cascade)
@@ -217,7 +216,6 @@ model Project {
experiments Experiment[]
datasets Dataset[]
loggedCalls LoggedCall[]
fineTunes FineTune[]
apiKeys ApiKey[]
}
@@ -280,7 +278,6 @@ model LoggedCall {
model String?
tags LoggedCallTag[]
datasetEntries DatasetEntry[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -315,7 +312,7 @@ model LoggedCallModelResponse {
outputTokens Int?
finishReason String?
completionId String?
cost Float?
cost Decimal? @db.Decimal(18, 12)
// The LoggedCall that created this LoggedCallModelResponse
originalLoggedCallId String @unique @db.Uuid
@@ -430,33 +427,3 @@ model VerificationToken {
@@unique([identifier, token])
}
enum FineTuneStatus {
PENDING
TRAINING
AWAITING_DEPLOYMENT
DEPLOYING
DEPLOYED
ERROR
}
model FineTune {
id String @id @default(uuid()) @db.Uuid
slug String @unique
baseModel String
status FineTuneStatus @default(PENDING)
trainingStartedAt DateTime?
trainingFinishedAt DateTime?
deploymentStartedAt DateTime?
deploymentFinishedAt DateTime?
datasetId String @db.Uuid
dataset Dataset @relation(fields: [datasetId], references: [id], onDelete: Cascade)
projectId String @db.Uuid
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
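
For a sense of how the new FineTune model gets consumed, here is a rough Prisma client query in the shape the FineTunesTable component at the end of this diff appears to expect (it reads fineTune.dataset._count.datasetEntries). The actual router code is not shown in this changeset, so treat this as an illustrative sketch rather than the real query.

// Illustrative only: list a project's fine-tunes, newest first, with the
// dataset entry count the table renders. Field names come from the schema above.
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

export async function listFineTunes(projectId: string) {
  return prisma.fineTune.findMany({
    where: { projectId },
    orderBy: { createdAt: "desc" },
    include: {
      dataset: {
        include: { _count: { select: { datasetEntries: true } } },
      },
    },
  });
}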

View File

@@ -80,7 +80,7 @@ const MODEL_RESPONSE_TEMPLATES: {
},
respStatus: 200,
respPayload: {
id: "chatcmpl-7",
id: "chatcmpl-7lNspqePJWVyXwXebupxb1eMozo6Q",
model: "gpt-3.5-turbo-0613",
usage: {
total_tokens: 241,
@@ -108,7 +108,7 @@ const MODEL_RESPONSE_TEMPLATES: {
inputTokens: 236,
outputTokens: 5,
finishReason: "stop",
tags: [{ name: "prompt_id", value: "define_func" }],
tags: [],
},
{
reqPayload: {
@@ -167,7 +167,7 @@ const MODEL_RESPONSE_TEMPLATES: {
},
respStatus: 200,
respPayload: {
id: "chatcmpl-7",
id: "chatcmpl-7lNifmc5AncyAvleZRDBhAcLFYBIT",
model: "gpt-3.5-turbo-0613",
usage: {
total_tokens: 227,
@@ -210,7 +210,7 @@ const MODEL_RESPONSE_TEMPLATES: {
},
respStatus: 200,
respPayload: {
id: "chatcmpl-7",
id: "chatcmpl-7lNh1TtrsJVgz3Nj70bKkZZk7xPi7",
model: "gpt-3.5-turbo-0613",
usage: {
total_tokens: 21,
@@ -234,7 +234,7 @@ const MODEL_RESPONSE_TEMPLATES: {
inputTokens: 14,
outputTokens: 7,
finishReason: "stop",
tags: [{ name: "prompt_id", value: "translate_text" }],
tags: [{ name: "prompt_id", value: "id2" }],
},
{
reqPayload: {
@@ -281,7 +281,7 @@ const MODEL_RESPONSE_TEMPLATES: {
},
respStatus: 200,
respPayload: {
id: "chatcmpl-7",
id: "chatcmpl-7lQS3MktOT8BTgNEytl9dkyssCQqL",
model: "gpt-4-0613",
usage: {
total_tokens: 2910,
@@ -311,7 +311,7 @@ const MODEL_RESPONSE_TEMPLATES: {
outputTokens: 108,
finishReason: "stop",
tags: [
{ name: "prompt_id", value: "chatcmpl-7" },
{ name: "prompt_id", value: "chatcmpl-7lQS3MktOT8BTgNEytl9dkyssCQqL" },
{ name: "some_other_tag", value: "some_other_value" },
],
},
@@ -339,7 +339,7 @@ const loggedCallsToCreate: Prisma.LoggedCallCreateManyInput[] = [];
const loggedCallModelResponsesToCreate: Prisma.LoggedCallModelResponseCreateManyInput[] = [];
const loggedCallsToUpdate: Prisma.LoggedCallUpdateArgs[] = [];
const loggedCallTagsToCreate: Prisma.LoggedCallTagCreateManyInput[] = [];
for (let i = 0; i < 11437; i++) {
for (let i = 0; i < 1437; i++) {
const loggedCallId = uuidv4();
const loggedCallModelResponseId = uuidv4();
const template =

View File

@@ -10,4 +10,6 @@ pnpm tsx src/promptConstructor/migrate.ts
echo "Starting the server"
pnpm start
pnpm concurrently --kill-others \
"pnpm start" \
"pnpm tsx src/server/tasks/worker.ts"

View File

@@ -1,6 +0,0 @@
#! /bin/bash
set -e
cd "$(dirname "$0")/.."
apt-get update
apt-get install -y htop psql

View File

@@ -1,10 +0,0 @@
#! /bin/bash
set -e
echo "Migrating the database"
pnpm prisma migrate deploy
echo "Starting 4 workers"
pnpm concurrently "pnpm worker" "pnpm worker" "pnpm worker" "pnpm worker"

View File

@@ -1,13 +0,0 @@
#! /bin/bash
set -e
cd "$(dirname "$0")/../.."
echo "Env is"
echo $ENVIRONMENT
docker build . --file app/Dockerfile --tag "openpipe-prod"
# Run the image
docker run --env-file app/.env -it --entrypoint "/bin/bash" "openpipe-prod"

View File

@@ -3,7 +3,6 @@
// https://docs.sentry.io/platforms/javascript/guides/nextjs/
import * as Sentry from "@sentry/nextjs";
import { isError } from "lodash-es";
import { env } from "~/env.mjs";
if (env.NEXT_PUBLIC_SENTRY_DSN) {
@@ -16,10 +15,4 @@ if (env.NEXT_PUBLIC_SENTRY_DSN) {
// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
});
} else {
// Install local debug exception handler for rejected promises
process.on("unhandledRejection", (reason) => {
const reasonDetails = isError(reason) ? reason?.stack : reason;
console.log("Unhandled Rejection at:", reasonDetails);
});
}

View File

@@ -1,65 +0,0 @@
import {
Button,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
ModalOverlay,
VStack,
Text,
HStack,
Icon,
Link,
} from "@chakra-ui/react";
import { BsStars } from "react-icons/bs";
import { useSession } from "next-auth/react";
export const BetaModal = ({ isOpen, onClose }: { isOpen: boolean; onClose: () => void }) => {
const session = useSession();
const email = session.data?.user.email ?? "";
return (
<Modal
isOpen={isOpen}
onClose={onClose}
closeOnOverlayClick={false}
size={{ base: "xl", md: "2xl" }}
>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader>
<HStack>
<Icon as={BsStars} />
<Text>Beta-Only Feature</Text>
</HStack>
</ModalHeader>
<ModalBody maxW="unset">
<VStack spacing={8} py={4} alignItems="flex-start">
<Text fontSize="md">
This feature is currently in beta. To receive early access to beta-only features, join
the waitlist. You'll receive an email at <b>{email}</b> when you're approved.
</Text>
</VStack>
</ModalBody>
<ModalFooter>
<HStack spacing={4}>
<Button
as={Link}
textDecoration="none !important"
colorScheme="orange"
target="_blank"
href={`https://ax3nafkw0jp.typeform.com/to/ZNpYqvAc#email=${email}`}
>
Join Waitlist
</Button>
<Button colorScheme="blue" onClick={onClose}>
Done
</Button>
</HStack>
</ModalFooter>
</ModalContent>
</Modal>
);
};

View File

@@ -1,4 +1,3 @@
import { useState, useMemo, useCallback } from "react";
import {
Button,
HStack,
@@ -15,18 +14,16 @@ import {
VStack,
} from "@chakra-ui/react";
import { type PromptVariant } from "@prisma/client";
import { isString } from "lodash-es";
import { isObject, isString } from "lodash-es";
import { useState } from "react";
import { RiExchangeFundsFill } from "react-icons/ri";
import { type ProviderModel } from "~/modelProviders/types";
import { api } from "~/utils/api";
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
import { useExperiment, useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
import { lookupModel, modelLabel } from "~/utils/utils";
import CompareFunctions from "../RefinePromptModal/CompareFunctions";
import { ModelSearch } from "./ModelSearch";
import { ModelStatsCard } from "./ModelStatsCard";
import { maybeReportError } from "~/utils/errorHandling/maybeReportError";
import { useAppStore } from "~/state/store";
export const ChangeModelModal = ({
variant,
@@ -35,43 +32,48 @@ export const ChangeModelModal = ({
variant: PromptVariant;
onClose: () => void;
}) => {
const editorOptionsMap = useAppStore((s) => s.sharedVariantEditor.editorOptionsMap);
const originalPromptFn = useMemo(
() => editorOptionsMap[variant.uiId]?.getContent() || "",
[editorOptionsMap, variant.uiId],
);
const originalModel = lookupModel(variant.modelProvider, variant.model);
const [selectedModel, setSelectedModel] = useState({
provider: variant.modelProvider,
model: variant.model,
} as ProviderModel);
const [convertedModel, setConvertedModel] = useState<ProviderModel | undefined>();
const [modifiedPromptFn, setModifiedPromptFn] = useState<string>();
const visibleScenarios = useVisibleScenarioIds();
const utils = api.useContext();
const experiment = useExperiment();
const { mutateAsync: getModifiedPromptMutateAsync } =
const { mutateAsync: getModifiedPromptMutateAsync, data: modifiedPromptFn } =
api.promptVariants.getModifiedPromptFn.useMutation();
const [getModifiedPromptFn, modificationInProgress] = useHandledAsyncCallback(async () => {
if (!experiment) return;
const resp = await getModifiedPromptMutateAsync({
await getModifiedPromptMutateAsync({
id: variant.id,
originalPromptFn,
newModel: selectedModel,
});
if (maybeReportError(resp)) return;
setModifiedPromptFn(resp.payload);
setConvertedModel(selectedModel);
}, [getModifiedPromptMutateAsync, onClose, experiment, variant, selectedModel]);
const replaceVariant = useCallback(() => {
if (!modifiedPromptFn) return;
editorOptionsMap[variant.uiId]?.setContent(modifiedPromptFn);
const replaceVariantMutation = api.promptVariants.replaceVariant.useMutation();
const [replaceVariant, replacementInProgress] = useHandledAsyncCallback(async () => {
if (
!variant.experimentId ||
!modifiedPromptFn ||
(isObject(modifiedPromptFn) && "status" in modifiedPromptFn)
)
return;
await replaceVariantMutation.mutateAsync({
id: variant.id,
promptConstructor: modifiedPromptFn,
streamScenarios: visibleScenarios,
});
await utils.promptVariants.list.invalidate();
onClose();
}, [variant.uiId, editorOptionsMap, onClose, modifiedPromptFn]);
}, [replaceVariantMutation, variant, onClose, modifiedPromptFn]);
const originalLabel = modelLabel(variant.modelProvider, variant.model);
const selectedLabel = modelLabel(selectedModel.provider, selectedModel.model);
@@ -128,9 +130,9 @@ export const ChangeModelModal = ({
colorScheme="blue"
onClick={replaceVariant}
minW={24}
isDisabled={!convertedModel || modificationInProgress}
isDisabled={!convertedModel || modificationInProgress || replacementInProgress}
>
Accept
{replacementInProgress ? <Spinner boxSize={4} /> : <Text>Accept</Text>}
</Button>
</HStack>
</ModalFooter>

View File

@@ -1,41 +1,74 @@
import { Button, Icon, useDisclosure, Text } from "@chakra-ui/react";
import { useRouter } from "next/router";
import { BsTrash } from "react-icons/bs";
import {
Button,
Icon,
AlertDialog,
AlertDialogBody,
AlertDialogFooter,
AlertDialogHeader,
AlertDialogContent,
AlertDialogOverlay,
useDisclosure,
Text,
} from "@chakra-ui/react";
import { useRouter } from "next/router";
import { useRef } from "react";
import { BsTrash } from "react-icons/bs";
import { useAppStore } from "~/state/store";
import { api } from "~/utils/api";
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
import DeleteExperimentDialog from "../experiments/DeleteExperimentDialog";
export const DeleteButton = () => {
const experiment = useExperiment();
const mutation = api.experiments.delete.useMutation();
const utils = api.useContext();
const router = useRouter();
const disclosure = useDisclosure();
const closeDrawer = useAppStore((s) => s.closeDrawer);
const [onDelete] = useHandledAsyncCallback(async () => {
const { isOpen, onOpen, onClose } = useDisclosure();
const cancelRef = useRef<HTMLButtonElement>(null);
const [onDeleteConfirm] = useHandledAsyncCallback(async () => {
if (!experiment.data?.id) return;
await mutation.mutateAsync({ id: experiment.data.id });
await utils.experiments.list.invalidate();
await router.push({ pathname: "/experiments" });
closeDrawer();
}, [router, closeDrawer]);
onClose();
}, [mutation, experiment.data?.id, router]);
return (
<>
<Button
size="sm"
variant="ghost"
colorScheme="red"
fontWeight="normal"
onClick={disclosure.onOpen}
>
<Button size="sm" variant="ghost" colorScheme="red" fontWeight="normal" onClick={onOpen}>
<Icon as={BsTrash} boxSize={4} />
<Text ml={2}>Delete Experiment</Text>
</Button>
<DeleteExperimentDialog
experimentId={experiment.data?.id}
onDelete={onDelete}
disclosure={disclosure}
/>
<AlertDialog isOpen={isOpen} leastDestructiveRef={cancelRef} onClose={onClose}>
<AlertDialogOverlay>
<AlertDialogContent>
<AlertDialogHeader fontSize="lg" fontWeight="bold">
Delete Experiment
</AlertDialogHeader>
<AlertDialogBody>
If you delete this experiment all the associated prompts and scenarios will be deleted
as well. Are you sure?
</AlertDialogBody>
<AlertDialogFooter>
<Button ref={cancelRef} onClick={onClose}>
Cancel
</Button>
<Button colorScheme="red" onClick={onDeleteConfirm} ml={3}>
Delete
</Button>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialogOverlay>
</AlertDialog>
</>
);
};

View File

@@ -1,14 +0,0 @@
import { Tooltip, Icon, VStack } from "@chakra-ui/react";
import { RiInformationFill } from "react-icons/ri";
const InfoCircle = ({ tooltipText }: { tooltipText: string }) => {
return (
<Tooltip label={tooltipText} fontSize="sm" shouldWrapChildren maxW={80}>
<VStack>
<Icon as={RiInformationFill} boxSize={5} color="gray.500" />
</VStack>
</Tooltip>
);
};
export default InfoCircle;

View File

@@ -11,7 +11,6 @@ import {
Button,
Text,
useDisclosure,
type InputGroupProps,
} from "@chakra-ui/react";
import { FiChevronDown } from "react-icons/fi";
@@ -21,25 +20,15 @@ type InputDropdownProps<T> = {
options: ReadonlyArray<T>;
selectedOption: T;
onSelect: (option: T) => void;
inputGroupProps?: InputGroupProps;
};
const InputDropdown = <T,>({
options,
selectedOption,
onSelect,
inputGroupProps,
}: InputDropdownProps<T>) => {
const InputDropdown = <T,>({ options, selectedOption, onSelect }: InputDropdownProps<T>) => {
const popover = useDisclosure();
return (
<Popover placement="bottom-start" {...popover}>
<PopoverTrigger>
<InputGroup
cursor="pointer"
w={(selectedOption as string).length * 14 + 180}
{...inputGroupProps}
>
<InputGroup cursor="pointer" w={(selectedOption as string).length * 14 + 180}>
<Input
value={selectedOption as string}
// eslint-disable-next-line @typescript-eslint/no-empty-function -- controlled input requires onChange

View File

@@ -8,7 +8,7 @@ import {
useHandledAsyncCallback,
useVisibleScenarioIds,
} from "~/utils/hooks";
import { cellPadding } from "./constants";
import { cellPadding } from "../constants";
import { ActionButton } from "./ScenariosHeader";
export default function AddVariantButton() {

View File

@@ -43,7 +43,7 @@ export default function OutputCell({
type OutputSchema = Parameters<typeof provider.normalizeOutput>[0];
const { mutateAsync: hardRefetchMutate } = api.scenarioVariantCells.hardRefetch.useMutation();
const { mutateAsync: hardRefetchMutate } = api.scenarioVariantCells.forceRefetch.useMutation();
const [hardRefetch, hardRefetching] = useHandledAsyncCallback(async () => {
await hardRefetchMutate({ scenarioId: scenario.id, variantId: variant.id });
await utils.scenarioVariantCells.get.invalidate({
@@ -147,10 +147,9 @@ export default function OutputCell({
<ResponseLog
time={response.receivedAt}
title="Response received from API"
message={[
response.statusCode ? `Status: ${response.statusCode}\n` : "",
response.errorMessage ?? "",
].join("")}
message={`statusCode: ${response.statusCode ?? ""}\n ${
response.errorMessage ?? ""
}`}
/>
)}
</Fragment>

View File

@@ -16,7 +16,7 @@ import {
VStack,
} from "@chakra-ui/react";
import { BsArrowsAngleExpand, BsX } from "react-icons/bs";
import { cellPadding } from "./constants";
import { cellPadding } from "../constants";
import { FloatingLabelInput } from "./FloatingLabelInput";
import { ScenarioEditorModal } from "./ScenarioEditorModal";

View File

@@ -11,7 +11,7 @@ import {
IconButton,
Spinner,
} from "@chakra-ui/react";
import { cellPadding } from "./constants";
import { cellPadding } from "../constants";
import {
useExperiment,
useExperimentAccess,

View File

@@ -10,7 +10,7 @@ import {
} from "@chakra-ui/react";
import { useCallback, useEffect, useRef, useState } from "react";
import { FiMaximize, FiMinimize } from "react-icons/fi";
import { type CreatedEditor, editorBackground } from "~/state/sharedVariantEditor.slice";
import { editorBackground } from "~/state/sharedVariantEditor.slice";
import { useAppStore } from "~/state/store";
import { api } from "~/utils/api";
import {
@@ -24,10 +24,8 @@ import { type PromptVariant } from "./types";
export default function VariantEditor(props: { variant: PromptVariant }) {
const { canModify } = useExperimentAccess();
const monaco = useAppStore.use.sharedVariantEditor.monaco();
const updateOptionsForEditor = useAppStore.use.sharedVariantEditor.updateOptionsForEditor();
const editorRef = useRef<CreatedEditor | null>(null);
const editorRef = useRef<ReturnType<NonNullable<typeof monaco>["editor"]["create"]> | null>(null);
const containerRef = useRef<HTMLDivElement | null>(null);
const lastSavedFnRef = useRef(props.variant.promptConstructor);
const [editorId] = useState(() => `editor_${Math.random().toString(36).substring(7)}`);
const [isChanged, setIsChanged] = useState(false);
@@ -50,18 +48,22 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
}, [isFullscreen, toggleFullscreen]);
const lastSavedFn = props.variant.promptConstructor;
useEffect(() => {
// Store in ref so that we can access it dynamically
lastSavedFnRef.current = lastSavedFn;
}, [lastSavedFn]);
const modifierKey = useModifierKeyLabel();
const checkForChanges = useCallback(() => {
if (!editorRef.current) return;
const currentFn = editorRef.current.getValue();
setIsChanged(currentFn.length > 0 && currentFn !== lastSavedFnRef.current);
}, [editorRef]);
setIsChanged(currentFn.length > 0 && currentFn !== lastSavedFn);
}, [lastSavedFn]);
const matchUpdatedSavedFn = useCallback(() => {
if (!editorRef.current) return;
editorRef.current.setValue(lastSavedFn);
setIsChanged(false);
}, [lastSavedFn]);
useEffect(matchUpdatedSavedFn, [matchUpdatedSavedFn, lastSavedFn]);
const replaceVariant = api.promptVariants.replaceVariant.useMutation();
const utils = api.useContext();
@@ -108,7 +110,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
setIsChanged(false);
await utils.promptVariants.list.invalidate();
}, [checkForChanges, replaceVariant.mutateAsync]);
}, [checkForChanges]);
useEffect(() => {
if (monaco) {
@@ -134,11 +136,6 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
readOnly: !canModify,
});
updateOptionsForEditor(props.variant.uiId, {
getContent: () => editorRef.current?.getValue() || "",
setContent: (content) => editorRef.current?.setValue(content),
});
// Workaround because otherwise the commands only work on whatever
// editor was loaded on the page last.
// https://github.com/microsoft/monaco-editor/issues/2947#issuecomment-1422265201
@@ -158,7 +155,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
});
});
const checkForChangesListener = editorRef.current.onDidChangeModelContent(checkForChanges);
editorRef.current.onDidChangeModelContent(checkForChanges);
const resizeObserver = new ResizeObserver(() => {
editorRef.current?.layout();
@@ -167,7 +164,6 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
return () => {
resizeObserver.disconnect();
checkForChangesListener.dispose();
editorRef.current?.dispose();
};
}
@@ -175,7 +171,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
// We intentionally skip the onSave and props.savedConfig dependencies here because
// we don't want to re-render the editor from scratch
/* eslint-disable-next-line react-hooks/exhaustive-deps */
}, [monaco, editorId, updateOptionsForEditor]);
}, [monaco, editorId]);
useEffect(() => {
if (!editorRef.current) return;

View File

@@ -1,6 +1,6 @@
import { HStack, Icon, Text, useToken } from "@chakra-ui/react";
import { type PromptVariant } from "./types";
import { cellPadding } from "./constants";
import { cellPadding } from "../constants";
import { api } from "~/utils/api";
import chroma from "chroma-js";
import { BsCurrencyDollar } from "react-icons/bs";

View File

@@ -3,14 +3,13 @@ import { api } from "~/utils/api";
import AddVariantButton from "./AddVariantButton";
import ScenarioRow from "./ScenarioRow";
import VariantEditor from "./VariantEditor";
import VariantHeader from "./VariantHeader/VariantHeader";
import VariantHeader from "../VariantHeader/VariantHeader";
import VariantStats from "./VariantStats";
import { ScenariosHeader } from "./ScenariosHeader";
import { borders } from "./styles";
import { useScenarios } from "~/utils/hooks";
import ScenarioPaginator from "./ScenarioPaginator";
import { Fragment } from "react";
import useScrolledPast from "./useHasScrolledPast";
export default function OutputsTable({ experimentId }: { experimentId: string | undefined }) {
const variants = api.promptVariants.list.useQuery(
@@ -19,7 +18,6 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
);
const scenarios = useScenarios();
const shouldFlattenHeader = useScrolledPast(50);
if (!variants.data || !scenarios.data) return null;
@@ -65,8 +63,8 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
variant={variant}
canHide={variants.data.length > 1}
rowStart={1}
borderTopLeftRadius={isFirst && !shouldFlattenHeader ? 8 : 0}
borderTopRightRadius={isLast && !shouldFlattenHeader ? 8 : 0}
borderTopLeftRadius={isFirst ? 8 : 0}
borderTopRightRadius={isLast ? 8 : 0}
{...sharedProps}
/>
<GridItem rowStart={2} {...sharedProps}>
@@ -77,7 +75,6 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
{...sharedProps}
borderBottomLeftRadius={isFirst ? 8 : 0}
borderBottomRightRadius={isLast ? 8 : 0}
boxShadow="5px 5px 15px 1px rgba(0, 0, 0, 0.1);"
>
<VariantStats variant={variant} />
</GridItem>

View File

@@ -1,34 +0,0 @@
import { useState, useEffect } from "react";
const useScrolledPast = (scrollThreshold: number) => {
const [hasScrolledPast, setHasScrolledPast] = useState(true);
useEffect(() => {
const container = document.getElementById("output-container");
if (!container) {
console.warn('Element with id "outputs-container" not found.');
return;
}
const checkScroll = () => {
const { scrollTop } = container;
// Check if scrollTop is greater than or equal to scrollThreshold
setHasScrolledPast(scrollTop > scrollThreshold);
};
checkScroll();
container.addEventListener("scroll", checkScroll);
// Cleanup
return () => {
container.removeEventListener("scroll", checkScroll);
};
}, []);
return hasScrolledPast;
};
export default useScrolledPast;

View File

@@ -1,19 +1,15 @@
import {
HStack,
IconButton,
Text,
Select,
type StackProps,
Icon,
useBreakpointValue,
} from "@chakra-ui/react";
import { HStack, IconButton, Text, Select, type StackProps, Icon } from "@chakra-ui/react";
import React, { useCallback } from "react";
import { FiChevronsLeft, FiChevronsRight, FiChevronLeft, FiChevronRight } from "react-icons/fi";
import { usePageParams } from "~/utils/hooks";
const pageSizeOptions = [10, 25, 50, 100];
const Paginator = ({ count, ...props }: { count: number; condense?: boolean } & StackProps) => {
const Paginator = ({
count,
condense,
...props
}: { count: number; condense?: boolean } & StackProps) => {
const { page, pageSize, setPageParams } = usePageParams();
const lastPage = Math.ceil(count / pageSize);
@@ -41,9 +37,6 @@ const Paginator = ({ count, ...props }: { count: number; condense?: boolean } &
const goToLastPage = () => setPageParams({ page: lastPage }, "replace");
const goToFirstPage = () => setPageParams({ page: 1 }, "replace");
const isMobile = useBreakpointValue({ base: true, md: false });
const condense = isMobile || props.condense;
if (count === 0) return null;
return (

View File

@@ -1,4 +1,3 @@
import { useState, useMemo, useCallback } from "react";
import {
Button,
Modal,
@@ -10,23 +9,22 @@ import {
ModalOverlay,
VStack,
Text,
Spinner,
HStack,
Icon,
SimpleGrid,
} from "@chakra-ui/react";
import { BsStars } from "react-icons/bs";
import { api } from "~/utils/api";
import { useHandledAsyncCallback } from "~/utils/hooks";
import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
import { type PromptVariant } from "@prisma/client";
import { useState } from "react";
import CompareFunctions from "./CompareFunctions";
import { CustomInstructionsInput } from "../CustomInstructionsInput";
import { RefineAction } from "./RefineAction";
import { isString } from "lodash-es";
import { isObject, isString } from "lodash-es";
import { type RefinementAction, type SupportedProvider } from "~/modelProviders/types";
import frontendModelProviders from "~/modelProviders/frontendModelProviders";
import { useAppStore } from "~/state/store";
import { maybeReportError } from "~/utils/errorHandling/maybeReportError";
export const RefinePromptModal = ({
variant,
@@ -35,23 +33,19 @@ export const RefinePromptModal = ({
variant: PromptVariant;
onClose: () => void;
}) => {
const editorOptionsMap = useAppStore((s) => s.sharedVariantEditor.editorOptionsMap);
const originalPromptFn = useMemo(
() => editorOptionsMap[variant.uiId]?.getContent() || "",
[editorOptionsMap, variant.uiId],
);
const utils = api.useContext();
const visibleScenarios = useVisibleScenarioIds();
const refinementActions =
frontendModelProviders[variant.modelProvider as SupportedProvider].refinementActions || {};
const { mutateAsync: getModifiedPromptMutateAsync } =
const { mutateAsync: getModifiedPromptMutateAsync, data: refinedPromptFn } =
api.promptVariants.getModifiedPromptFn.useMutation();
const [instructions, setInstructions] = useState<string>("");
const [activeRefineActionLabel, setActiveRefineActionLabel] = useState<string | undefined>(
undefined,
);
const [refinedPromptFn, setRefinedPromptFn] = useState<string>();
const [getModifiedPromptFn, modificationInProgress] = useHandledAsyncCallback(
async (label?: string) => {
@@ -60,22 +54,31 @@ export const RefinePromptModal = ({
? (refinementActions[label] as RefinementAction).instructions
: instructions;
setActiveRefineActionLabel(label);
const resp = await getModifiedPromptMutateAsync({
await getModifiedPromptMutateAsync({
id: variant.id,
originalPromptFn,
instructions: updatedInstructions,
});
if (maybeReportError(resp)) return;
setRefinedPromptFn(resp.payload);
},
[getModifiedPromptMutateAsync, onClose, variant, instructions, setActiveRefineActionLabel],
);
const replaceVariant = useCallback(() => {
if (!refinedPromptFn) return;
editorOptionsMap[variant.uiId]?.setContent(refinedPromptFn);
const replaceVariantMutation = api.promptVariants.replaceVariant.useMutation();
const [replaceVariant, replacementInProgress] = useHandledAsyncCallback(async () => {
if (
!variant.experimentId ||
!refinedPromptFn ||
(isObject(refinedPromptFn) && "status" in refinedPromptFn)
)
return;
await replaceVariantMutation.mutateAsync({
id: variant.id,
promptConstructor: refinedPromptFn,
streamScenarios: visibleScenarios,
});
await utils.promptVariants.list.invalidate();
onClose();
}, [variant.uiId, editorOptionsMap, onClose, refinedPromptFn]);
}, [replaceVariantMutation, variant, onClose, refinedPromptFn]);
return (
<Modal
@@ -123,7 +126,7 @@ export const RefinePromptModal = ({
/>
</VStack>
<CompareFunctions
originalFunction={originalPromptFn}
originalFunction={variant.promptConstructor}
newFunction={isString(refinedPromptFn) ? refinedPromptFn : undefined}
maxH="40vh"
/>
@@ -136,9 +139,9 @@ export const RefinePromptModal = ({
colorScheme="blue"
onClick={replaceVariant}
minW={24}
isDisabled={!refinedPromptFn}
isDisabled={replacementInProgress || !refinedPromptFn}
>
Accept
{replacementInProgress ? <Spinner boxSize={4} /> : <Text>Accept</Text>}
</Button>
</HStack>
</ModalFooter>

View File

@@ -0,0 +1,26 @@
import { VStack, HStack, type StackProps, Text, Divider } from "@chakra-ui/react";
import Link, { type LinkProps } from "next/link";
const StatsCard = ({
title,
href,
children,
...rest
}: { title: string; href: string } & StackProps & LinkProps) => {
return (
<VStack flex={1} borderWidth={1} padding={4} borderRadius={4} borderColor="gray.300" {...rest}>
<HStack w="full" justifyContent="space-between">
<Text fontSize="md" fontWeight="bold">
{title}
</Text>
<Link href={href}>
<Text color="blue">View all</Text>
</Link>
</HStack>
<Divider />
{children}
</VStack>
);
};
export default StatsCard;

View File

@@ -1,11 +1,11 @@
import { useState, type DragEvent } from "react";
import { type PromptVariant } from "../types";
import { type PromptVariant } from "../OutputsTable/types";
import { api } from "~/utils/api";
import { RiDraggable } from "react-icons/ri";
import { useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
import { HStack, Icon, Text, GridItem, type GridItemProps } from "@chakra-ui/react"; // Changed here
import { cellPadding, headerMinHeight } from "../constants";
import AutoResizeTextArea from "../../AutoResizeTextArea";
import AutoResizeTextArea from "../AutoResizeTextArea";
import VariantHeaderMenuButton from "./VariantHeaderMenuButton";
export default function VariantHeader(
@@ -75,7 +75,7 @@ export default function VariantHeader(
padding={0}
sx={{
position: "sticky",
top: "0",
top: "-2",
// Ensure that the menu always appears above the sticky header of other variants
zIndex: menuOpen ? "dropdown" : 10,
}}

View File

@@ -1,4 +1,6 @@
import { useState } from "react";
import { type PromptVariant } from "../OutputsTable/types";
import { api } from "~/utils/api";
import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
import {
Icon,
Menu,
@@ -12,13 +14,10 @@ import {
} from "@chakra-ui/react";
import { BsFillTrashFill, BsGear, BsStars } from "react-icons/bs";
import { FaRegClone } from "react-icons/fa";
import { useState } from "react";
import { RefinePromptModal } from "../RefinePromptModal/RefinePromptModal";
import { RiExchangeFundsFill } from "react-icons/ri";
import { api } from "~/utils/api";
import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
import { type PromptVariant } from "../types";
import { RefinePromptModal } from "../../RefinePromptModal/RefinePromptModal";
import { ChangeModelModal } from "../../ChangeModelModal/ChangeModelModal";
import { ChangeModelModal } from "../ChangeModelModal/ChangeModelModal";
export default function VariantHeaderMenuButton({
variant,

View File

@@ -2,12 +2,11 @@ import { Card, CardHeader, Heading, Table, Tbody, HStack, Button, Text } from "@
import { useState } from "react";
import Link from "next/link";
import { useLoggedCalls } from "~/utils/hooks";
import { EmptyTableRow, TableHeader, TableRow } from "../requestLogs/TableRow";
import { TableHeader, TableRow } from "../requestLogs/TableRow";
export default function LoggedCallsTable() {
const { data: loggedCalls } = useLoggedCalls(false);
const [expandedRow, setExpandedRow] = useState<string | null>(null);
const { data: loggedCalls } = useLoggedCalls();
return (
<Card width="100%" overflow="hidden">
@@ -24,8 +23,7 @@ export default function LoggedCallsTable() {
<Table>
<TableHeader />
<Tbody>
{loggedCalls?.calls.length ? (
loggedCalls?.calls.map((loggedCall) => {
{loggedCalls?.calls.map((loggedCall) => {
return (
<TableRow
key={loggedCall.id}
@@ -40,10 +38,7 @@ export default function LoggedCallsTable() {
}}
/>
);
})
) : (
<EmptyTableRow filtersApplied={false} />
)}
})}
</Tbody>
</Table>
</Card>

View File

@@ -0,0 +1,112 @@
import {
HStack,
Icon,
VStack,
Text,
Divider,
Spinner,
AspectRatio,
SkeletonText,
} from "@chakra-ui/react";
import { RiDatabase2Line } from "react-icons/ri";
import { formatTimePast } from "~/utils/dayjs";
import Link from "next/link";
import { useRouter } from "next/router";
import { BsPlusSquare } from "react-icons/bs";
import { api } from "~/utils/api";
import { useHandledAsyncCallback } from "~/utils/hooks";
import { useAppStore } from "~/state/store";
type DatasetData = {
name: string;
numEntries: number;
id: string;
createdAt: Date;
updatedAt: Date;
};
export const DatasetCard = ({ dataset }: { dataset: DatasetData }) => {
return (
<AspectRatio ratio={1.2} w="full">
<VStack
as={Link}
href={{ pathname: "/data/[id]", query: { id: dataset.id } }}
bg="gray.50"
_hover={{ bg: "gray.100" }}
transition="background 0.2s"
cursor="pointer"
borderColor="gray.200"
borderWidth={1}
p={4}
justify="space-between"
>
<HStack w="full" color="gray.700" justify="center">
<Icon as={RiDatabase2Line} boxSize={4} />
<Text fontWeight="bold">{dataset.name}</Text>
</HStack>
<HStack h="full" spacing={4} flex={1} align="center">
<CountLabel label="Rows" count={dataset.numEntries} />
</HStack>
<HStack w="full" color="gray.500" fontSize="xs" textAlign="center">
<Text flex={1}>Created {formatTimePast(dataset.createdAt)}</Text>
<Divider h={4} orientation="vertical" />
<Text flex={1}>Updated {formatTimePast(dataset.updatedAt)}</Text>
</HStack>
</VStack>
</AspectRatio>
);
};
const CountLabel = ({ label, count }: { label: string; count: number }) => {
return (
<VStack alignItems="center" flex={1}>
<Text color="gray.500" fontWeight="bold">
{label}
</Text>
<Text fontSize="sm" color="gray.500">
{count}
</Text>
</VStack>
);
};
export const NewDatasetCard = () => {
const router = useRouter();
const selectedProjectId = useAppStore((s) => s.selectedProjectId);
const createMutation = api.datasets.create.useMutation();
const [createDataset, isLoading] = useHandledAsyncCallback(async () => {
const newDataset = await createMutation.mutateAsync({ projectId: selectedProjectId ?? "" });
await router.push({ pathname: "/data/[id]", query: { id: newDataset.id } });
}, [createMutation, router, selectedProjectId]);
return (
<AspectRatio ratio={1.2} w="full">
<VStack
align="center"
justify="center"
_hover={{ cursor: "pointer", bg: "gray.50" }}
transition="background 0.2s"
cursor="pointer"
borderColor="gray.200"
borderWidth={1}
p={4}
onClick={createDataset}
>
<Icon as={isLoading ? Spinner : BsPlusSquare} boxSize={8} />
<Text display={{ base: "none", md: "block" }} ml={2}>
New Dataset
</Text>
</VStack>
</AspectRatio>
);
};
export const DatasetCardSkeleton = () => (
<AspectRatio ratio={1.2} w="full">
<VStack align="center" borderColor="gray.200" borderWidth={1} p={4} bg="gray.50">
<SkeletonText noOfLines={1} w="80%" />
<SkeletonText noOfLines={2} w="60%" />
<SkeletonText noOfLines={1} w="80%" />
</VStack>
</AspectRatio>
);

View File

@@ -0,0 +1,16 @@
import { type StackProps } from "@chakra-ui/react";
import { useDatasetEntries } from "~/utils/hooks";
import Paginator from "../Paginator";
const DatasetEntriesPaginator = (props: StackProps) => {
const { data } = useDatasetEntries();
if (!data) return null;
const { count } = data;
return <Paginator count={count} {...props} />;
};
export default DatasetEntriesPaginator;

View File

@@ -0,0 +1,31 @@
import { type StackProps, VStack, Table, Th, Tr, Thead, Tbody, Text } from "@chakra-ui/react";
import { useDatasetEntries } from "~/utils/hooks";
import TableRow from "./TableRow";
import DatasetEntriesPaginator from "./DatasetEntriesPaginator";
const DatasetEntriesTable = (props: StackProps) => {
const { data } = useDatasetEntries();
return (
<VStack justifyContent="space-between" {...props}>
<Table variant="simple" sx={{ "table-layout": "fixed", width: "full" }}>
<Thead>
<Tr>
<Th>Input</Th>
<Th>Output</Th>
</Tr>
</Thead>
<Tbody>{data?.entries.map((entry) => <TableRow key={entry.id} entry={entry} />)}</Tbody>
</Table>
{!data || data.entries.length === 0 ? (
<Text alignSelf="flex-start" pl={6} color="gray.500">
No entries found
</Text>
) : (
<DatasetEntriesPaginator />
)}
</VStack>
);
};
export default DatasetEntriesTable;

View File

@@ -0,0 +1,26 @@
import { Button, HStack, useDisclosure } from "@chakra-ui/react";
import { BiImport } from "react-icons/bi";
import { BsStars } from "react-icons/bs";
import { GenerateDataModal } from "./GenerateDataModal";
export const DatasetHeaderButtons = () => {
const generateModalDisclosure = useDisclosure();
return (
<>
<HStack>
<Button leftIcon={<BiImport />} colorScheme="blue" variant="ghost">
Import Data
</Button>
<Button leftIcon={<BsStars />} colorScheme="blue" onClick={generateModalDisclosure.onOpen}>
Generate Data
</Button>
</HStack>
<GenerateDataModal
isOpen={generateModalDisclosure.isOpen}
onClose={generateModalDisclosure.onClose}
/>
</>
);
};

View File

@@ -0,0 +1,128 @@
import {
Modal,
ModalBody,
ModalCloseButton,
ModalContent,
ModalHeader,
ModalOverlay,
ModalFooter,
Text,
HStack,
VStack,
Icon,
NumberInput,
NumberInputField,
NumberInputStepper,
NumberIncrementStepper,
NumberDecrementStepper,
Button,
} from "@chakra-ui/react";
import { BsStars } from "react-icons/bs";
import { useState } from "react";
import { useDataset, useHandledAsyncCallback } from "~/utils/hooks";
import { api } from "~/utils/api";
import AutoResizeTextArea from "~/components/AutoResizeTextArea";
export const GenerateDataModal = ({
isOpen,
onClose,
}: {
isOpen: boolean;
onClose: () => void;
}) => {
const utils = api.useContext();
const datasetId = useDataset().data?.id;
const [numToGenerate, setNumToGenerate] = useState<number>(20);
const [inputDescription, setInputDescription] = useState<string>(
"Each input should contain an email body. Half of the emails should contain event details, and the other half should not.",
);
const [outputDescription, setOutputDescription] = useState<string>(
`Each output should contain "true" or "false", where "true" indicates that the email contains event details.`,
);
const generateEntriesMutation = api.datasetEntries.autogenerateEntries.useMutation();
const [generateEntries, generateEntriesInProgress] = useHandledAsyncCallback(async () => {
if (!inputDescription || !outputDescription || !numToGenerate || !datasetId) return;
await generateEntriesMutation.mutateAsync({
datasetId,
inputDescription,
outputDescription,
numToGenerate,
});
await utils.datasetEntries.list.invalidate();
onClose();
}, [
generateEntriesMutation,
onClose,
inputDescription,
outputDescription,
numToGenerate,
datasetId,
]);
return (
<Modal isOpen={isOpen} onClose={onClose} size={{ base: "xl", sm: "2xl", md: "3xl" }}>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader>
<HStack>
<Icon as={BsStars} />
<Text>Generate Data</Text>
</HStack>
</ModalHeader>
<ModalCloseButton />
<ModalBody maxW="unset">
<VStack w="full" spacing={8} padding={8} alignItems="flex-start">
<VStack alignItems="flex-start" spacing={2}>
<Text fontWeight="bold">Number of Rows:</Text>
<NumberInput
step={5}
defaultValue={15}
min={0}
max={100}
onChange={(valueString) => setNumToGenerate(parseInt(valueString) || 0)}
value={numToGenerate}
w="24"
>
<NumberInputField />
<NumberInputStepper>
<NumberIncrementStepper />
<NumberDecrementStepper />
</NumberInputStepper>
</NumberInput>
</VStack>
<VStack alignItems="flex-start" w="full" spacing={2}>
<Text fontWeight="bold">Input Description:</Text>
<AutoResizeTextArea
value={inputDescription}
onChange={(e) => setInputDescription(e.target.value)}
placeholder="Each input should contain..."
/>
</VStack>
<VStack alignItems="flex-start" w="full" spacing={2}>
<Text fontWeight="bold">Output Description (optional):</Text>
<AutoResizeTextArea
value={outputDescription}
onChange={(e) => setOutputDescription(e.target.value)}
placeholder="The output should contain..."
/>
</VStack>
</VStack>
</ModalBody>
<ModalFooter>
<Button
colorScheme="blue"
isLoading={generateEntriesInProgress}
isDisabled={!numToGenerate || !inputDescription || !outputDescription}
onClick={generateEntries}
>
Generate
</Button>
</ModalFooter>
</ModalContent>
</Modal>
);
};

View File

@@ -0,0 +1,13 @@
import { Td, Tr } from "@chakra-ui/react";
import { type DatasetEntry } from "@prisma/client";
const TableRow = ({ entry }: { entry: DatasetEntry }) => {
return (
<Tr key={entry.id}>
<Td>{entry.input}</Td>
<Td>{entry.output}</Td>
</Tr>
);
};
export default TableRow;

View File

@@ -1,66 +0,0 @@
import { useRef } from "react";
import {
type UseDisclosureReturn,
AlertDialog,
AlertDialogOverlay,
AlertDialogContent,
AlertDialogHeader,
AlertDialogBody,
AlertDialogFooter,
Button,
} from "@chakra-ui/react";
import { api } from "~/utils/api";
import { useHandledAsyncCallback } from "~/utils/hooks";
const DeleteExperimentDialog = ({
experimentId,
onDelete,
disclosure,
}: {
experimentId?: string;
onDelete?: () => void;
disclosure: UseDisclosureReturn;
}) => {
const cancelRef = useRef<HTMLButtonElement>(null);
const mutation = api.experiments.delete.useMutation();
const utils = api.useContext();
const [onDeleteConfirm] = useHandledAsyncCallback(async () => {
if (!experimentId) return;
await mutation.mutateAsync({ id: experimentId });
await utils.experiments.list.invalidate();
onDelete?.();
disclosure.onClose();
}, [mutation, experimentId, disclosure.onClose]);
return (
<AlertDialog leastDestructiveRef={cancelRef} {...disclosure}>
<AlertDialogOverlay>
<AlertDialogContent>
<AlertDialogHeader fontSize="lg" fontWeight="bold">
Delete Experiment
</AlertDialogHeader>
<AlertDialogBody>
If you delete this experiment all the associated prompts and scenarios will be deleted
as well. Are you sure?
</AlertDialogBody>
<AlertDialogFooter>
<Button ref={cancelRef} onClick={disclosure.onClose}>
Cancel
</Button>
<Button colorScheme="red" onClick={onDeleteConfirm} ml={3}>
Delete
</Button>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialogOverlay>
</AlertDialog>
);
};
export default DeleteExperimentDialog;

View File

@@ -1,4 +1,3 @@
import { type MouseEvent, useState } from "react";
import {
HStack,
Icon,
@@ -9,29 +8,17 @@ import {
AspectRatio,
SkeletonText,
Card,
useDisclosure,
Box,
Menu,
MenuButton,
MenuList,
MenuItem,
IconButton,
useToast,
} from "@chakra-ui/react";
import { RiFlaskLine } from "react-icons/ri";
import { formatTimePast } from "~/utils/dayjs";
import Link from "next/link";
import { useRouter } from "next/router";
import { BsPlusSquare, BsThreeDotsVertical, BsLink45Deg, BsTrash } from "react-icons/bs";
import { formatTimePast } from "~/utils/dayjs";
import { type RouterOutputs, api } from "~/utils/api";
import { BsPlusSquare } from "react-icons/bs";
import { RouterOutputs, api } from "~/utils/api";
import { useHandledAsyncCallback } from "~/utils/hooks";
import { useAppStore } from "~/state/store";
import DeleteExperimentDialog from "./DeleteExperimentDialog";
export const ExperimentCard = ({ exp }: { exp: RouterOutputs["experiments"]["list"][0] }) => {
const [isMenuHovered, setIsMenuHovered] = useState(false);
return (
<Card
w="full"
@@ -40,7 +27,7 @@ export const ExperimentCard = ({ exp }: { exp: RouterOutputs["experiments"]["lis
p={4}
bg="white"
borderRadius={4}
_hover={{ bg: isMenuHovered ? undefined : "gray.100" }}
_hover={{ bg: "gray.100" }}
transition="background 0.2s"
aspectRatio={1.2}
>
@@ -51,18 +38,10 @@ export const ExperimentCard = ({ exp }: { exp: RouterOutputs["experiments"]["lis
href={{ pathname: "/experiments/[experimentSlug]", query: { experimentSlug: exp.slug } }}
justify="space-between"
>
<HStack w="full" justify="space-between" spacing={0}>
<Box w={6} />
<HStack color="gray.700" justify="center">
<HStack w="full" color="gray.700" justify="center">
<Icon as={RiFlaskLine} boxSize={4} />
<Text fontWeight="bold">{exp.label}</Text>
</HStack>
<CardMenu
experimentId={exp.id}
experimentSlug={exp.slug}
setIsMenuHovered={setIsMenuHovered}
/>
</HStack>
<HStack h="full" spacing={4} flex={1} align="center">
<CountLabel label="Variants" count={exp.promptVariantCount} />
<Divider h={12} orientation="vertical" />
@@ -78,75 +57,6 @@ export const ExperimentCard = ({ exp }: { exp: RouterOutputs["experiments"]["lis
);
};
const CardMenu = ({
experimentId,
experimentSlug,
setIsMenuHovered,
}: {
experimentId: string;
experimentSlug: string;
setIsMenuHovered: (isHovered: boolean) => void;
}) => {
const deleteDisclosure = useDisclosure();
const menuDisclosure = useDisclosure();
const toast = useToast();
const [copyShareLink] = useHandledAsyncCallback(
async (e: MouseEvent<HTMLButtonElement>) => {
if (typeof window === "undefined") return;
e.preventDefault();
e.stopPropagation();
const shareLink = `${window.location.origin}/experiments/${experimentSlug}`;
await navigator.clipboard.writeText(shareLink);
toast({
title: "Share link copied to clipboard",
status: "success",
duration: 2000,
isClosable: true,
});
menuDisclosure.onClose();
},
[toast, menuDisclosure.onClose, experimentSlug],
);
return (
<>
<Menu isLazy {...menuDisclosure}>
<MenuButton
as={IconButton}
aria-label="Options"
icon={<BsThreeDotsVertical />}
variant="ghost"
onClick={(e) => {
e.preventDefault();
e.stopPropagation();
menuDisclosure.onOpen();
}}
onMouseEnter={() => setIsMenuHovered(true)}
onMouseLeave={() => setIsMenuHovered(false)}
boxSize={6}
minW={0}
/>
<MenuList>
<MenuItem icon={<Icon as={BsLink45Deg} boxSize={5} />} onClick={copyShareLink}>
Copy Link
</MenuItem>
<MenuItem
icon={<Icon as={BsTrash} boxSize={5} />}
onClick={(e) => {
e.preventDefault();
e.stopPropagation();
deleteDisclosure.onOpen();
}}
color="red.500"
>
Delete
</MenuItem>
</MenuList>
</Menu>
<DeleteExperimentDialog experimentId={experimentId} disclosure={deleteDisclosure} />
</>
);
};
const CountLabel = ({ label, count }: { label: string; count: number }) => {
return (
<VStack alignItems="center" flex={1}>
@@ -188,7 +98,9 @@ export const NewExperimentCard = () => {
>
<VStack align="center" justify="center" w="full" h="full" p={4} onClick={createExperiment}>
<Icon as={isLoading ? Spinner : BsPlusSquare} boxSize={8} />
<Text ml={2}>New Experiment</Text>
<Text display={{ base: "none", md: "block" }} ml={2}>
New Experiment
</Text>
</VStack>
</Card>
);

View File

@@ -1,65 +0,0 @@
import { Card, Table, Thead, Tr, Th, Tbody, Td, VStack, Icon, Text } from "@chakra-ui/react";
import { FaTable } from "react-icons/fa";
import { type FineTuneStatus } from "@prisma/client";
import dayjs from "~/utils/dayjs";
import { useFineTunes } from "~/utils/hooks";
const FineTunesTable = ({}) => {
const { data } = useFineTunes();
const fineTunes = data?.fineTunes || [];
return (
<Card width="100%" overflowX="auto">
{fineTunes.length ? (
<Table>
<Thead>
<Tr>
<Th>ID</Th>
<Th>Created At</Th>
<Th>Base Model</Th>
<Th>Dataset Size</Th>
<Th>Status</Th>
</Tr>
</Thead>
<Tbody>
{fineTunes.map((fineTune) => {
return (
<Tr key={fineTune.id}>
<Td>{fineTune.slug}</Td>
<Td>{dayjs(fineTune.createdAt).format("MMMM D h:mm A")}</Td>
<Td>{fineTune.baseModel}</Td>
<Td>{fineTune.dataset._count.datasetEntries}</Td>
<Td fontSize="sm" fontWeight="bold">
<Text color={getStatusColor(fineTune.status)}>{fineTune.status}</Text>
</Td>
</Tr>
);
})}
</Tbody>
</Table>
) : (
<VStack py={8}>
<Icon as={FaTable} boxSize={16} color="gray.300" />
<Text color="gray.400" fontSize="lg" fontWeight="bold">
No Fine Tunes Found
</Text>
</VStack>
)}
</Card>
);
};
export default FineTunesTable;
const getStatusColor = (status: FineTuneStatus) => {
switch (status) {
case "DEPLOYED":
return "green.500";
case "ERROR":
return "red.500";
default:
return "yellow.500";
}
};

View File

@@ -13,19 +13,14 @@ import {
} from "@chakra-ui/react";
import Head from "next/head";
import Link from "next/link";
import { useRouter } from "next/router";
import { BsGearFill, BsGithub, BsPersonCircle } from "react-icons/bs";
import { IoStatsChartOutline } from "react-icons/io5";
import { RiHome3Line, RiFlaskLine } from "react-icons/ri";
import { AiOutlineThunderbolt } from "react-icons/ai";
import { FaReadme } from "react-icons/fa";
import { RiHome3Line, RiDatabase2Line, RiFlaskLine } from "react-icons/ri";
import { signIn, useSession } from "next-auth/react";
import { env } from "~/env.mjs";
import ProjectMenu from "./ProjectMenu";
import NavSidebarOption from "./NavSidebarOption";
import IconLink from "./IconLink";
import { BetaModal } from "../BetaModal";
import { useAppStore } from "~/state/store";
const Divider = () => <Box h="1px" bgColor="gray.300" w="full" />;
@@ -76,10 +71,21 @@ const NavSidebar = () => {
<ProjectMenu />
<Divider />
<IconLink icon={RiHome3Line} label="Dashboard" href="/dashboard" />
<IconLink icon={IoStatsChartOutline} label="Request Logs" href="/request-logs" />
<IconLink icon={AiOutlineThunderbolt} label="Fine Tunes" href="/fine-tunes" beta />
{env.NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS && (
<>
<IconLink icon={RiHome3Line} label="Dashboard" href="/dashboard" beta />
<IconLink
icon={IoStatsChartOutline}
label="Request Logs"
href="/request-logs"
beta
/>
</>
)}
<IconLink icon={RiFlaskLine} label="Experiments" href="/experiments" />
{env.NEXT_PUBLIC_SHOW_DATA && (
<IconLink icon={RiDatabase2Line} label="Data" href="/data" />
)}
<VStack w="full" alignItems="flex-start" spacing={0} pt={8}>
<Text
pl={2}
@@ -99,7 +105,7 @@ const NavSidebar = () => {
<NavSidebarOption>
<HStack
w="full"
p={{ base: 2, md: 4 }}
p={4}
as={ChakraLink}
justifyContent="start"
onClick={() => {
@@ -114,22 +120,7 @@ const NavSidebar = () => {
</NavSidebarOption>
)}
</VStack>
<HStack
w="full"
px={{ base: 2, md: 4 }}
py={{ base: 1, md: 2 }}
as={ChakraLink}
justifyContent="start"
href="https://docs.openpipe.ai"
target="_blank"
color="gray.500"
spacing={1}
>
<Icon as={FaReadme} boxSize={4} mr={2} />
<Text fontWeight="bold" fontSize="sm">
Read the Docs
</Text>
</HStack>
<Divider />
<VStack spacing={0} align="center">
<ChakraLink
@@ -150,15 +141,12 @@ export default function AppShell({
children,
title,
requireAuth,
requireBeta,
}: {
children: React.ReactNode;
title?: string;
requireAuth?: boolean;
requireBeta?: boolean;
}) {
const [vh, setVh] = useState("100vh"); // Default height to prevent flicker on initial render
const router = useRouter();
useEffect(() => {
const setHeight = () => {
@@ -186,11 +174,7 @@ export default function AppShell({
}
}, [requireAuth, user, authLoading]);
const flags = useAppStore((s) => s.featureFlags.featureFlags);
const flagsLoaded = useAppStore((s) => s.featureFlags.flagsLoaded);
return (
<>
<Flex h={vh} w="100vw">
<Head>
<title>{title ? `${title} | OpenPipe` : "OpenPipe"}</title>
@@ -200,7 +184,5 @@ export default function AppShell({
{children}
</Box>
</Flex>
<BetaModal isOpen={!!requireBeta && flagsLoaded && !flags.betaAccess} onClose={router.back} />
</>
);
}

View File

@@ -14,7 +14,6 @@ import {
Link as ChakraLink,
Image,
Box,
Portal,
} from "@chakra-ui/react";
import { useEffect } from "react";
import Link from "next/link";
@@ -57,7 +56,6 @@ export default function ProjectMenu() {
await utils.projects.list.invalidate();
setSelectedProjectId(newProj.id);
await router.push({ pathname: "/project/settings" });
popover.onClose();
}, [createMutation, router]);
const user = useSession().data;
@@ -69,13 +67,7 @@ export default function ProjectMenu() {
);
return (
<VStack
w="full"
alignItems="flex-start"
spacing={0}
py={1}
zIndex={popover.isOpen ? "dropdown" : undefined}
>
<VStack w="full" alignItems="flex-start" spacing={0} py={1}>
<Popover
placement="bottom"
isOpen={popover.isOpen}
@@ -111,7 +103,6 @@ export default function ProjectMenu() {
</HStack>
</NavSidebarOption>
</PopoverTrigger>
<Portal>
<PopoverContent
_focusVisible={{ outline: "unset" }}
w={220}
@@ -170,7 +161,6 @@ export default function ProjectMenu() {
</VStack>
</VStack>
</PopoverContent>
</Portal>
</Popover>
</VStack>
);

View File

@@ -23,6 +23,7 @@ export default function UserMenu({ user, ...rest }: { user: Session } & StackPro
);
return (
<>
<Popover placement="right">
<PopoverTrigger>
<NavSidebarOption>
@@ -66,5 +67,6 @@ export default function UserMenu({ user, ...rest }: { user: Session } & StackPro
</VStack>
</PopoverContent>
</Popover>
</>
);
}

View File

@@ -1,30 +1,12 @@
import { useState } from "react";
import { Button, HStack, type ButtonProps, Icon, Text } from "@chakra-ui/react";
import { type IconType } from "react-icons";
import { useAppStore } from "~/state/store";
import { BetaModal } from "../BetaModal";
const ActionButton = ({
icon,
label,
requireBeta = false,
onClick,
...buttonProps
}: {
icon: IconType;
label: string;
requireBeta?: boolean;
onClick?: () => void;
} & ButtonProps) => {
const flags = useAppStore((s) => s.featureFlags.featureFlags);
const flagsLoaded = useAppStore((s) => s.featureFlags.flagsLoaded);
const [betaModalOpen, setBetaModalOpen] = useState(false);
const isBetaBlocked = requireBeta && flagsLoaded && !flags.betaAccess;
}: { icon: IconType; label: string } & ButtonProps) => {
return (
<>
<Button
colorScheme="blue"
color="black"
@@ -35,16 +17,13 @@ const ActionButton = ({
size="sm"
fontSize="sm"
fontWeight="normal"
onClick={isBetaBlocked ? () => setBetaModalOpen(true) : onClick}
{...buttonProps}
>
<HStack spacing={1}>
{icon && <Icon as={icon} color={requireBeta ? "orange.400" : undefined} />}
<Text display={{ base: "none", md: "flex" }}>{label}</Text>
{icon && <Icon as={icon} />}
<Text>{label}</Text>
</HStack>
</Button>
<BetaModal isOpen={betaModalOpen} onClose={() => setBetaModalOpen(false)} />
</>
);
};

View File

@@ -1,117 +0,0 @@
import {
Icon,
Popover,
PopoverTrigger,
PopoverContent,
VStack,
HStack,
Button,
Text,
useDisclosure,
Box,
} from "@chakra-ui/react";
import { BiCheck } from "react-icons/bi";
import { BsToggles } from "react-icons/bs";
import { useMemo } from "react";
import { useIsClientRehydrated, useTagNames } from "~/utils/hooks";
import { useAppStore } from "~/state/store";
import { StaticColumnKeys } from "~/state/columnVisiblitySlice";
import ActionButton from "./ActionButton";
const ColumnVisiblityDropdown = () => {
const tagNames = useTagNames().data;
const visibleColumns = useAppStore((s) => s.columnVisibility.visibleColumns);
const toggleColumnVisibility = useAppStore((s) => s.columnVisibility.toggleColumnVisibility);
const totalColumns = Object.keys(StaticColumnKeys).length + (tagNames?.length ?? 0);
const popover = useDisclosure();
const columnVisiblityOptions = useMemo(() => {
const options: { label: string; key: string }[] = [
{
label: "Sent At",
key: StaticColumnKeys.SENT_AT,
},
{
label: "Model",
key: StaticColumnKeys.MODEL,
},
{
label: "Duration",
key: StaticColumnKeys.DURATION,
},
{
label: "Input Tokens",
key: StaticColumnKeys.INPUT_TOKENS,
},
{
label: "Output Tokens",
key: StaticColumnKeys.OUTPUT_TOKENS,
},
{
label: "Status Code",
key: StaticColumnKeys.STATUS_CODE,
},
];
for (const tagName of tagNames ?? []) {
options.push({
label: tagName,
key: tagName,
});
}
return options;
}, [tagNames]);
const isClientRehydrated = useIsClientRehydrated();
if (!isClientRehydrated) return null;
return (
<Popover
placement="bottom-start"
isOpen={popover.isOpen}
onOpen={popover.onOpen}
onClose={popover.onClose}
>
<PopoverTrigger>
<Box>
<ActionButton
label={`Columns (${visibleColumns.size}/${totalColumns})`}
icon={BsToggles}
/>
</Box>
</PopoverTrigger>
<PopoverContent boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);" minW={0} w="auto">
<VStack spacing={0} maxH={400} overflowY="auto">
{columnVisiblityOptions?.map((option, index) => (
<HStack
key={index}
as={Button}
onClick={() => toggleColumnVisibility(option.key)}
w="full"
minH={10}
variant="ghost"
justifyContent="space-between"
fontWeight="semibold"
borderRadius={0}
colorScheme="blue"
color="black"
fontSize="sm"
borderBottomWidth={1}
>
<Text mr={16}>{option.label}</Text>
<Box w={5}>
{visibleColumns.has(option.key) && (
<Icon as={BiCheck} color="blue.500" boxSize={5} />
)}
</Box>
</HStack>
))}
</VStack>
</PopoverContent>
</Popover>
);
};
export default ColumnVisiblityDropdown;

View File

@@ -1,211 +0,0 @@
import { useState, useEffect } from "react";
import {
Modal,
ModalOverlay,
ModalContent,
ModalHeader,
ModalCloseButton,
ModalBody,
ModalFooter,
HStack,
VStack,
Icon,
Text,
Button,
Checkbox,
NumberInput,
NumberInputField,
NumberInputStepper,
NumberIncrementStepper,
NumberDecrementStepper,
Collapse,
Flex,
useDisclosure,
type UseDisclosureReturn,
} from "@chakra-ui/react";
import { BiExport } from "react-icons/bi";
import { useHandledAsyncCallback } from "~/utils/hooks";
import { api } from "~/utils/api";
import { useAppStore } from "~/state/store";
import ActionButton from "./ActionButton";
import InputDropdown from "../InputDropdown";
import { FiChevronUp, FiChevronDown } from "react-icons/fi";
import InfoCircle from "../InfoCircle";
const SUPPORTED_EXPORT_FORMATS = ["alpaca-finetune", "openai-fine-tune", "unformatted"];
const ExportButton = () => {
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
const disclosure = useDisclosure();
return (
<>
<ActionButton
onClick={disclosure.onOpen}
label="Export"
icon={BiExport}
isDisabled={selectedLogIds.size === 0}
requireBeta
/>
<ExportLogsModal disclosure={disclosure} />
</>
);
};
export default ExportButton;
const ExportLogsModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
const selectedProjectId = useAppStore((s) => s.selectedProjectId);
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
const clearSelectedLogIds = useAppStore((s) => s.selectedLogs.clearSelectedLogIds);
const [selectedExportFormat, setSelectedExportFormat] = useState(SUPPORTED_EXPORT_FORMATS[0]);
const [testingSplit, setTestingSplit] = useState(10);
const [removeDuplicates, setRemoveDuplicates] = useState(true);
const [showAdvancedOptions, setShowAdvancedOptions] = useState(false);
useEffect(() => {
if (disclosure.isOpen) {
setSelectedExportFormat(SUPPORTED_EXPORT_FORMATS[0]);
setTestingSplit(10);
setRemoveDuplicates(true);
}
}, [disclosure.isOpen]);
const exportLogsMutation = api.loggedCalls.export.useMutation();
const [exportLogs, exportInProgress] = useHandledAsyncCallback(async () => {
if (!selectedProjectId || !selectedLogIds.size || !testingSplit || !selectedExportFormat)
return;
const response = await exportLogsMutation.mutateAsync({
projectId: selectedProjectId,
selectedLogIds: Array.from(selectedLogIds),
testingSplit,
selectedExportFormat,
removeDuplicates,
});
const dataUrl = `data:application/zip;base64,${response}`;
const blob = await fetch(dataUrl).then((res) => res.blob());
const url = URL.createObjectURL(blob);
const a = document.createElement("a");
a.href = url;
a.download = `data.zip`;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
disclosure.onClose();
clearSelectedLogIds();
}, [
exportLogsMutation,
selectedProjectId,
selectedLogIds,
testingSplit,
selectedExportFormat,
removeDuplicates,
]);
return (
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader>
<HStack>
<Icon as={BiExport} />
<Text>Export Logs</Text>
</HStack>
</ModalHeader>
<ModalCloseButton />
<ModalBody maxW="unset">
<VStack w="full" spacing={8} pt={4} alignItems="flex-start">
<Text>
We'll export the <b>{selectedLogIds.size}</b> logs you have selected in the format of
your choice.
</Text>
<VStack alignItems="flex-start" spacing={4}>
<Flex
flexDir={{ base: "column", md: "row" }}
alignItems={{ base: "flex-start", md: "center" }}
>
<HStack w={48} alignItems="center" spacing={1}>
<Text fontWeight="bold">Format:</Text>
<InfoCircle tooltipText="Format logs for fine tuning or export them without formatting." />
</HStack>
<InputDropdown
options={SUPPORTED_EXPORT_FORMATS}
selectedOption={selectedExportFormat}
onSelect={(option) => setSelectedExportFormat(option)}
inputGroupProps={{ w: 48 }}
/>
</Flex>
<Flex
flexDir={{ base: "column", md: "row" }}
alignItems={{ base: "flex-start", md: "center" }}
>
<HStack w={48} alignItems="center" spacing={1}>
<Text fontWeight="bold">Testing Split:</Text>
<InfoCircle tooltipText="The percent of your logs that will be reserved for testing and saved in another file. Logs are split randomly." />
</HStack>
<HStack>
<NumberInput
defaultValue={10}
onChange={(_, num) => setTestingSplit(num)}
min={1}
max={100}
w={48}
>
<NumberInputField />
<NumberInputStepper>
<NumberIncrementStepper />
<NumberDecrementStepper />
</NumberInputStepper>
</NumberInput>
</HStack>
</Flex>
</VStack>
<VStack alignItems="flex-start" spacing={0}>
<Button
variant="unstyled"
color="blue.600"
onClick={() => setShowAdvancedOptions(!showAdvancedOptions)}
>
<HStack>
<Text>Advanced Options</Text>
<Icon as={showAdvancedOptions ? FiChevronUp : FiChevronDown} />
</HStack>
</Button>
<Collapse in={showAdvancedOptions} unmountOnExit={true}>
<VStack align="stretch" pt={4}>
<HStack>
<Checkbox
colorScheme="blue"
isChecked={removeDuplicates}
onChange={(e) => setRemoveDuplicates(e.target.checked)}
>
<Text>Remove duplicates</Text>
</Checkbox>
<InfoCircle tooltipText="To avoid overfitting and speed up training, automatically deduplicate logs with matching input and output." />
</HStack>
</VStack>
</Collapse>
</VStack>
</VStack>
</ModalBody>
<ModalFooter>
<HStack>
<Button colorScheme="gray" onClick={disclosure.onClose} minW={24}>
Cancel
</Button>
<Button colorScheme="blue" onClick={exportLogs} isLoading={exportInProgress} minW={24}>
Export
</Button>
</HStack>
</ModalFooter>
</ModalContent>
</Modal>
);
};

View File

@@ -1,162 +0,0 @@
import { useState, useEffect } from "react";
import {
Modal,
ModalOverlay,
ModalContent,
ModalHeader,
ModalCloseButton,
ModalBody,
ModalFooter,
HStack,
VStack,
Icon,
Text,
Button,
useDisclosure,
type UseDisclosureReturn,
Input,
} from "@chakra-ui/react";
import { AiTwotoneThunderbolt } from "react-icons/ai";
import humanId from "human-id";
import { useRouter } from "next/router";
import { useHandledAsyncCallback } from "~/utils/hooks";
import { api } from "~/utils/api";
import { useAppStore } from "~/state/store";
import ActionButton from "./ActionButton";
import InputDropdown from "../InputDropdown";
import { FiChevronDown } from "react-icons/fi";
const SUPPORTED_BASE_MODELS = ["llama2-7b", "llama2-13b", "llama2-70b", "gpt-3.5-turbo"];
const FineTuneButton = () => {
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
const disclosure = useDisclosure();
return (
<>
<ActionButton
onClick={disclosure.onOpen}
label="Fine Tune"
icon={AiTwotoneThunderbolt}
isDisabled={selectedLogIds.size === 0}
requireBeta
/>
<FineTuneModal disclosure={disclosure} />
</>
);
};
export default FineTuneButton;
const FineTuneModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
const selectedProjectId = useAppStore((s) => s.selectedProjectId);
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
const clearSelectedLogIds = useAppStore((s) => s.selectedLogs.clearSelectedLogIds);
const [selectedBaseModel, setSelectedBaseModel] = useState(SUPPORTED_BASE_MODELS[0]);
const [modelSlug, setModelSlug] = useState(humanId({ separator: "-", capitalize: false }));
useEffect(() => {
if (disclosure.isOpen) {
setSelectedBaseModel(SUPPORTED_BASE_MODELS[0]);
setModelSlug(humanId({ separator: "-", capitalize: false }));
}
}, [disclosure.isOpen]);
const utils = api.useContext();
const router = useRouter();
const createFineTuneMutation = api.fineTunes.create.useMutation();
const [createFineTune, creationInProgress] = useHandledAsyncCallback(async () => {
if (!selectedProjectId || !modelSlug || !selectedBaseModel || !selectedLogIds.size) return;
await createFineTuneMutation.mutateAsync({
projectId: selectedProjectId,
slug: modelSlug,
baseModel: selectedBaseModel,
selectedLogIds: Array.from(selectedLogIds),
});
await utils.fineTunes.list.invalidate();
await router.push({ pathname: "/fine-tunes" });
clearSelectedLogIds();
disclosure.onClose();
}, [createFineTuneMutation, selectedProjectId, selectedLogIds, modelSlug, selectedBaseModel]);
return (
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader>
<HStack>
<Icon as={AiTwotoneThunderbolt} />
<Text>Fine Tune</Text>
</HStack>
</ModalHeader>
<ModalCloseButton />
<ModalBody maxW="unset">
<VStack w="full" spacing={8} pt={4} alignItems="flex-start">
<Text>
We'll train on the <b>{selectedLogIds.size}</b> logs you've selected.
</Text>
<VStack>
<HStack spacing={2} w="full">
<Text fontWeight="bold" w={36}>
Model ID:
</Text>
<Input
value={modelSlug}
onChange={(e) => setModelSlug(e.target.value)}
w={48}
placeholder="unique-id"
onKeyDown={(e) => {
// If the user types anything other than a-z, A-Z, or 0-9, replace it with -
if (!/[a-zA-Z0-9]/.test(e.key)) {
e.preventDefault();
setModelSlug((s) => s && `${s}-`);
}
}}
/>
</HStack>
<HStack spacing={2}>
<Text fontWeight="bold" w={36}>
Base model:
</Text>
<InputDropdown
options={SUPPORTED_BASE_MODELS}
selectedOption={selectedBaseModel}
onSelect={(option) => setSelectedBaseModel(option)}
inputGroupProps={{ w: 48 }}
/>
</HStack>
</VStack>
<Button variant="unstyled" color="blue.600">
<HStack>
<Text>Advanced Options</Text>
<Icon as={FiChevronDown} />
</HStack>
</Button>
</VStack>
</ModalBody>
<ModalFooter>
<HStack>
<Button colorScheme="gray" onClick={disclosure.onClose} minW={24}>
Cancel
</Button>
<Button
colorScheme="blue"
onClick={createFineTune}
isLoading={creationInProgress}
minW={24}
isDisabled={!modelSlug}
>
Start Training
</Button>
</HStack>
</ModalFooter>
</ModalContent>
</Modal>
);
};

View File

@@ -1,7 +1,7 @@
import { Card, Table, Tbody } from "@chakra-ui/react";
import { useState } from "react";
import { useLoggedCalls } from "~/utils/hooks";
import { TableHeader, TableRow, EmptyTableRow } from "./TableRow";
import { TableHeader, TableRow } from "./TableRow";
export default function LoggedCallsTable() {
const [expandedRow, setExpandedRow] = useState<string | null>(null);
@@ -10,10 +10,9 @@ export default function LoggedCallsTable() {
return (
<Card width="100%" overflowX="auto">
<Table>
<TableHeader showOptions />
<TableHeader showCheckbox />
<Tbody>
{loggedCalls?.calls.length ? (
loggedCalls?.calls?.map((loggedCall) => {
{loggedCalls?.calls?.map((loggedCall) => {
return (
<TableRow
key={loggedCall.id}
@@ -26,13 +25,10 @@ export default function LoggedCallsTable() {
setExpandedRow(loggedCall.id);
}
}}
showOptions
showCheckbox
/>
);
})
) : (
<EmptyTableRow />
)}
})}
</Tbody>
</Table>
</Card>

View File

@@ -13,21 +13,22 @@ import {
ButtonGroup,
Text,
Checkbox,
Link as ChakraLink,
} from "@chakra-ui/react";
import dayjs from "dayjs";
import relativeTime from "dayjs/plugin/relativeTime";
import Link from "next/link";
import dayjs from "~/utils/dayjs";
import { type RouterOutputs } from "~/utils/api";
import { FormattedJson } from "./FormattedJson";
import { useAppStore } from "~/state/store";
import { useIsClientRehydrated, useLoggedCalls, useTagNames } from "~/utils/hooks";
import { useLoggedCalls, useTagNames } from "~/utils/hooks";
import { useMemo } from "react";
import { StaticColumnKeys } from "~/state/columnVisiblitySlice";
dayjs.extend(relativeTime);
type LoggedCall = RouterOutputs["loggedCalls"]["list"]["calls"][0];
export const TableHeader = ({ showOptions }: { showOptions?: boolean }) => {
export const TableHeader = ({ showCheckbox }: { showCheckbox?: boolean }) => {
const matchingLogIds = useLoggedCalls().data?.matchingLogIds;
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
const addAll = useAppStore((s) => s.selectedLogs.addSelectedLogIds);
@@ -37,14 +38,10 @@ export const TableHeader = ({ showOptions }: { showOptions?: boolean }) => {
return matchingLogIds.every((id) => selectedLogIds.has(id));
}, [selectedLogIds, matchingLogIds]);
const tagNames = useTagNames().data;
const visibleColumns = useAppStore((s) => s.columnVisibility.visibleColumns);
const isClientRehydrated = useIsClientRehydrated();
if (!isClientRehydrated) return null;
return (
<Thead>
<Tr>
{showOptions && (
{showCheckbox && (
<Th pr={0}>
<HStack minW={16}>
<Checkbox
@@ -60,19 +57,13 @@ export const TableHeader = ({ showOptions }: { showOptions?: boolean }) => {
</HStack>
</Th>
)}
{visibleColumns.has(StaticColumnKeys.SENT_AT) && <Th>Sent At</Th>}
{visibleColumns.has(StaticColumnKeys.MODEL) && <Th>Model</Th>}
{tagNames
?.filter((tagName) => visibleColumns.has(tagName))
.map((tagName) => (
<Th key={tagName} textTransform={"none"}>
{tagName}
</Th>
))}
{visibleColumns.has(StaticColumnKeys.DURATION) && <Th isNumeric>Duration</Th>}
{visibleColumns.has(StaticColumnKeys.INPUT_TOKENS) && <Th isNumeric>Input tokens</Th>}
{visibleColumns.has(StaticColumnKeys.OUTPUT_TOKENS) && <Th isNumeric>Output tokens</Th>}
{visibleColumns.has(StaticColumnKeys.STATUS_CODE) && <Th isNumeric>Status</Th>}
<Th>Sent At</Th>
<Th>Model</Th>
{tagNames?.map((tagName) => <Th key={tagName}>{tagName}</Th>)}
<Th isNumeric>Duration</Th>
<Th isNumeric>Input tokens</Th>
<Th isNumeric>Output tokens</Th>
<Th isNumeric>Status</Th>
</Tr>
</Thead>
);
@@ -82,12 +73,12 @@ export const TableRow = ({
loggedCall,
isExpanded,
onToggle,
showOptions,
showCheckbox,
}: {
loggedCall: LoggedCall;
isExpanded: boolean;
onToggle: () => void;
showOptions?: boolean;
showCheckbox?: boolean;
}) => {
const isError = loggedCall.modelResponse?.statusCode !== 200;
const requestedAt = dayjs(loggedCall.requestedAt).format("MMMM D h:mm A");
@@ -97,14 +88,6 @@ export const TableRow = ({
const toggleChecked = useAppStore((s) => s.selectedLogs.toggleSelectedLogId);
const tagNames = useTagNames().data;
const visibleColumns = useAppStore((s) => s.columnVisibility.visibleColumns);
const visibleTagNames = useMemo(() => {
return tagNames?.filter((tagName) => visibleColumns.has(tagName)) ?? [];
}, [tagNames, visibleColumns]);
const isClientRehydrated = useIsClientRehydrated();
if (!isClientRehydrated) return null;
return (
<>
@@ -117,12 +100,11 @@ export const TableRow = ({
}}
fontSize="sm"
>
{showOptions && (
{showCheckbox && (
<Td>
<Checkbox isChecked={isChecked} onChange={() => toggleChecked(loggedCall.id)} />
</Td>
)}
{visibleColumns.has(StaticColumnKeys.SENT_AT) && (
<Td>
<Tooltip label={fullTime} placement="top">
<Box whiteSpace="nowrap" minW="120px">
@@ -130,8 +112,6 @@ export const TableRow = ({
</Box>
</Tooltip>
</Td>
)}
{visibleColumns.has(StaticColumnKeys.MODEL) && (
<Td>
<HStack justifyContent="flex-start">
<Text
@@ -148,11 +128,7 @@ export const TableRow = ({
</Text>
</HStack>
</Td>
)}
{visibleTagNames.map((tagName) => (
<Td key={tagName}>{loggedCall.tags[tagName]}</Td>
))}
{visibleColumns.has(StaticColumnKeys.DURATION) && (
{tagNames?.map((tagName) => <Td key={tagName}>{loggedCall.tags[tagName]}</Td>)}
<Td isNumeric>
{loggedCall.cacheHit ? (
<Text color="gray.500">Cached</Text>
@@ -160,21 +136,14 @@ export const TableRow = ({
((loggedCall.modelResponse?.durationMs ?? 0) / 1000).toFixed(2) + "s"
)}
</Td>
)}
{visibleColumns.has(StaticColumnKeys.INPUT_TOKENS) && (
<Td isNumeric>{loggedCall.modelResponse?.inputTokens}</Td>
)}
{visibleColumns.has(StaticColumnKeys.OUTPUT_TOKENS) && (
<Td isNumeric>{loggedCall.modelResponse?.outputTokens}</Td>
)}
{visibleColumns.has(StaticColumnKeys.STATUS_CODE) && (
<Td sx={{ color: isError ? "red.500" : "green.500", fontWeight: "semibold" }} isNumeric>
{loggedCall.modelResponse?.statusCode ?? "No response"}
</Td>
)}
</Tr>
<Tr>
<Td colSpan={visibleColumns.size + 1} w="full" p={0}>
<Td colSpan={8} p={0}>
<Collapse in={isExpanded} unmountOnExit={true}>
<VStack p={4} align="stretch">
<HStack align="stretch">
@@ -199,41 +168,3 @@ export const TableRow = ({
</>
);
};
export const EmptyTableRow = ({ filtersApplied = true }: { filtersApplied?: boolean }) => {
const visibleColumns = useAppStore((s) => s.columnVisibility.visibleColumns);
const filters = useAppStore((state) => state.logFilters.filters);
const { isLoading } = useLoggedCalls();
if (isLoading) return null;
if (filters.length && filtersApplied) {
return (
<Tr>
<Td w="full" colSpan={visibleColumns.size + 1}>
<Text color="gray.500" textAlign="center" w="full" p={4}>
No matching request logs found. Try removing some filters.
</Text>
</Td>
</Tr>
);
}
return (
<Tr>
<Td w="full" colSpan={visibleColumns.size + 1}>
<Text color="gray.500" textAlign="center" w="full" p={4}>
This project has no request logs. Learn how to add request logs to your project in our{" "}
<ChakraLink
href="https://docs.openpipe.ai/getting-started/quick-start"
target="_blank"
color="blue.600"
>
Quick Start
</ChakraLink>{" "}
guide.
</Text>
</Td>
</Tr>
);
};

View File

@@ -26,14 +26,6 @@ export const env = createEnv({
SMTP_PORT: z.string().default("placeholder"),
SMTP_LOGIN: z.string().default("placeholder"),
SMTP_PASSWORD: z.string().default("placeholder"),
WORKER_CONCURRENCY: z
.string()
.default("10")
.transform((val) => parseInt(val)),
WORKER_MAX_POOL_SIZE: z
.string()
.default("10")
.transform((val) => parseInt(val)),
},
/**
@@ -46,6 +38,8 @@ export const env = createEnv({
NEXT_PUBLIC_SOCKET_URL: z.string().url().default("http://localhost:3318"),
NEXT_PUBLIC_HOST: z.string().url().default("http://localhost:3000"),
NEXT_PUBLIC_SENTRY_DSN: z.string().optional(),
NEXT_PUBLIC_SHOW_DATA: z.string().optional(),
NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS: z.string().optional(),
},
/**
@@ -60,6 +54,7 @@ export const env = createEnv({
NEXT_PUBLIC_POSTHOG_KEY: process.env.NEXT_PUBLIC_POSTHOG_KEY,
NEXT_PUBLIC_SOCKET_URL: process.env.NEXT_PUBLIC_SOCKET_URL,
NEXT_PUBLIC_HOST: process.env.NEXT_PUBLIC_HOST,
NEXT_PUBLIC_SHOW_DATA: process.env.NEXT_PUBLIC_SHOW_DATA,
GITHUB_CLIENT_ID: process.env.GITHUB_CLIENT_ID,
GITHUB_CLIENT_SECRET: process.env.GITHUB_CLIENT_SECRET,
REPLICATE_API_TOKEN: process.env.REPLICATE_API_TOKEN,
@@ -67,13 +62,12 @@ export const env = createEnv({
NEXT_PUBLIC_SENTRY_DSN: process.env.NEXT_PUBLIC_SENTRY_DSN,
SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
OPENPIPE_API_KEY: process.env.OPENPIPE_API_KEY,
NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS: process.env.NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS,
SENDER_EMAIL: process.env.SENDER_EMAIL,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_LOGIN: process.env.SMTP_LOGIN,
SMTP_PASSWORD: process.env.SMTP_PASSWORD,
WORKER_CONCURRENCY: process.env.WORKER_CONCURRENCY,
WORKER_MAX_POOL_SIZE: process.env.WORKER_MAX_POOL_SIZE,
},
/**
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation.
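For reference, a quick standalone illustration of the string-to-integer pattern used for WORKER_CONCURRENCY and WORKER_MAX_POOL_SIZE above — a sketch using zod directly, without createEnv, and the variable name is only an example:
import { z } from "zod";
// Same shape as the server schema entries: default to "10", then parse to a number.
const workerConcurrency = z
  .string()
  .default("10")
  .transform((val) => parseInt(val));
console.log(workerConcurrency.parse(undefined)); // 10 (default applied, then transformed)
console.log(workerConcurrency.parse("25")); // 25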

View File

@@ -2,7 +2,7 @@
import { isArray, isString } from "lodash-es";
import { APIError } from "openai";
import { type ChatCompletion, type CompletionCreateParams } from "openai/resources/chat";
import mergeChunks from "openpipe/openai/mergeChunks";
import mergeChunks from "openpipe/src/openai/mergeChunks";
import { openai } from "~/server/utils/openai";
import { type CompletionResponse } from "../types";
@@ -16,16 +16,7 @@ export async function getCompletion(
try {
if (onStream) {
const resp = await openai.chat.completions.create(
{
...input,
stream: true,
openpipe: {
tags: {
prompt_id: "getCompletion",
stream: "true",
},
},
},
{ ...input, stream: true },
{
maxRetries: 0,
},
@@ -43,16 +34,7 @@ export async function getCompletion(
}
} else {
const resp = await openai.chat.completions.create(
{
...input,
stream: false,
openpipe: {
tags: {
prompt_id: "getCompletion",
stream: "false",
},
},
},
{ ...input, stream: false },
{
maxRetries: 0,
},

View File

@@ -7,7 +7,6 @@ import {
// templateSystemUserAssistantPrompt,
templateInstructionInputResponsePrompt,
templateAiroborosPrompt,
templateGryphePrompt,
templateVicunaPrompt,
} from "./templatePrompt";
@@ -70,15 +69,6 @@ const frontendModelProvider: FrontendModelProvider<SupportedModel, OpenpipeChatO
learnMoreUrl: "https://huggingface.co/lmsys/vicuna-13b-v1.5",
templatePrompt: templateVicunaPrompt,
},
"Gryphe/MythoMax-L2-13b": {
name: "MythoMax-L2-13b",
contextWindow: 4096,
pricePerSecond: 0.0003,
speed: "medium",
provider: "openpipe/Chat",
learnMoreUrl: "https://huggingface.co/Gryphe/MythoMax-L2-13b",
templatePrompt: templateGryphePrompt,
},
"NousResearch/Nous-Hermes-llama-2-7b": {
name: "Nous-Hermes-llama-2-7b",
contextWindow: 4096,

View File

@@ -13,27 +13,13 @@ const modelEndpoints: Record<OpenpipeChatInput["model"], string> = {
"NousResearch/Nous-Hermes-Llama2-13b": "https://ncv8pw3u0vb8j2-8000.proxy.runpod.net/v1",
"jondurbin/airoboros-l2-13b-gpt4-2.0": "https://9nrbx7oph4btou-8000.proxy.runpod.net/v1",
"lmsys/vicuna-13b-v1.5": "https://h88hkt3ux73rb7-8000.proxy.runpod.net/v1",
"Gryphe/MythoMax-L2-13b": "https://3l5jvhnxdgky3v-8000.proxy.runpod.net/v1",
"NousResearch/Nous-Hermes-llama-2-7b": "https://ua1bpc6kv3dgge-8000.proxy.runpod.net/v1",
};
const CUSTOM_MODELS_ENABLED = false;
export async function getCompletion(
input: OpenpipeChatInput,
onStream: ((partialOutput: OpenpipeChatOutput) => void) | null,
): Promise<CompletionResponse<OpenpipeChatOutput>> {
// Temporarily disable these models because of GPU constraints
if (!CUSTOM_MODELS_ENABLED) {
return {
type: "error",
message:
"We've disabled this model temporarily because of GPU capacity constraints. Check back later.",
autoRetry: false,
};
}
const { model, messages, ...rest } = input;
const templatedPrompt = frontendModelProvider.models[model].templatePrompt?.(messages);

View File

@@ -11,7 +11,6 @@ const supportedModels = [
"NousResearch/Nous-Hermes-Llama2-13b",
"jondurbin/airoboros-l2-13b-gpt4-2.0",
"lmsys/vicuna-13b-v1.5",
"Gryphe/MythoMax-L2-13b",
"NousResearch/Nous-Hermes-llama-2-7b",
] as const;

View File

@@ -11,7 +11,6 @@
"NousResearch/Nous-Hermes-Llama2-13b",
"jondurbin/airoboros-l2-13b-gpt4-2.0",
"lmsys/vicuna-13b-v1.5",
"Gryphe/MythoMax-L2-13b",
"NousResearch/Nous-Hermes-llama-2-7b"
]
},

View File

@@ -223,52 +223,3 @@ export const templateVicunaPrompt = (messages: OpenpipeChatInput["messages"]) =>
return prompt.trim();
};
// <System prompt/Character Card>
// ### Instruction:
// Your instruction or question here.
// For roleplay purposes, I suggest the following - Write <CHAR NAME>'s next reply in a chat between <YOUR NAME> and <CHAR NAME>. Write a single reply only.
// ### Response:
export const templateGryphePrompt = (messages: OpenpipeChatInput["messages"]) => {
const splitter = "\n\n";
const instructionTag = "### Instruction:\n";
const responseTag = "### Response:\n";
let combinedSystemMessage = "";
const conversationMessages = [];
for (const message of messages) {
if (message.role === "system") {
combinedSystemMessage += message.content;
} else if (message.role === "user") {
conversationMessages.push(instructionTag + message.content);
} else {
conversationMessages.push(responseTag + message.content);
}
}
let systemMessage = "";
if (combinedSystemMessage) {
// If there is no user message, add a user tag to the system message
if (conversationMessages.find((message) => message.startsWith(instructionTag))) {
systemMessage = `${combinedSystemMessage}\n\n`;
} else {
conversationMessages.unshift(instructionTag + combinedSystemMessage);
}
}
let prompt = `${systemMessage}${conversationMessages.join(splitter)}`;
// Ensure that the prompt ends with an assistant message
const lastInstructionIndex = prompt.lastIndexOf(instructionTag);
const lastAssistantIndex = prompt.lastIndexOf(responseTag);
if (lastInstructionIndex > lastAssistantIndex) {
prompt += splitter + responseTag;
}
return prompt;
};
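As a small self-contained check of the prompt layout described in the comment above — the expected string is derived by hand from templateGryphePrompt as written here, not from a live run, and the sample messages are made up:
// Hand-derived expectation for a two-message chat (system + user), per the template above.
const sampleMessages = [
  { role: "system", content: "You are helpful." },
  { role: "user", content: "Hi" },
];
// System text first, then the instruction block, then an empty response block for the model to fill.
const expectedPrompt = "You are helpful.\n\n### Instruction:\nHi\n\n### Response:\n";
console.log(sampleMessages.length, JSON.stringify(expectedPrompt));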

View File

@@ -8,8 +8,8 @@ const replicate = new Replicate({
});
const modelIds: Record<ReplicateLlama2Input["model"], string> = {
"7b-chat": "d24902e3fa9b698cc208b5e63136c4e26e828659a9f09827ca6ec5bb83014381",
"13b-chat": "9dff94b1bed5af738655d4a7cbcdcde2bd503aa85c94334fe1f42af7f3dd5ee3",
"7b-chat": "7b0bfc9aff140d5b75bacbed23e91fd3c34b01a1e958d32132de6e0a19796e2c",
"13b-chat": "2a7f981751ec7fdf87b5b91ad4db53683a98082e9ff7bfd12c8cd5ea85980a52",
"70b-chat": "2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1",
};

View File

@@ -0,0 +1,97 @@
import {
Box,
Breadcrumb,
BreadcrumbItem,
Center,
Flex,
Icon,
Input,
VStack,
} from "@chakra-ui/react";
import Link from "next/link";
import { useRouter } from "next/router";
import { useState, useEffect } from "react";
import { RiDatabase2Line } from "react-icons/ri";
import AppShell from "~/components/nav/AppShell";
import { api } from "~/utils/api";
import { useDataset, useHandledAsyncCallback } from "~/utils/hooks";
import DatasetEntriesTable from "~/components/datasets/DatasetEntriesTable";
import { DatasetHeaderButtons } from "~/components/datasets/DatasetHeaderButtons/DatasetHeaderButtons";
import PageHeaderContainer from "~/components/nav/PageHeaderContainer";
import ProjectBreadcrumbContents from "~/components/nav/ProjectBreadcrumbContents";
export default function Dataset() {
const router = useRouter();
const utils = api.useContext();
const dataset = useDataset();
const datasetId = router.query.id as string;
const [name, setName] = useState(dataset.data?.name || "");
useEffect(() => {
setName(dataset.data?.name || "");
}, [dataset.data?.name]);
const updateMutation = api.datasets.update.useMutation();
const [onSaveName] = useHandledAsyncCallback(async () => {
if (name && name !== dataset.data?.name && dataset.data?.id) {
await updateMutation.mutateAsync({
id: dataset.data.id,
updates: { name: name },
});
await Promise.all([utils.datasets.list.invalidate(), utils.datasets.get.invalidate()]);
}
}, [updateMutation, dataset.data?.id, dataset.data?.name, name]);
if (!dataset.isLoading && !dataset.data) {
return (
<AppShell title="Dataset not found">
<Center h="100%">
<div>Dataset not found 😕</div>
</Center>
</AppShell>
);
}
return (
<AppShell title={dataset.data?.name}>
<VStack h="full">
<PageHeaderContainer>
<Breadcrumb>
<BreadcrumbItem>
<ProjectBreadcrumbContents projectName={dataset.data?.project?.name} />
</BreadcrumbItem>
<BreadcrumbItem>
<Link href="/data">
<Flex alignItems="center" _hover={{ textDecoration: "underline" }}>
<Icon as={RiDatabase2Line} boxSize={4} mr={2} /> Datasets
</Flex>
</Link>
</BreadcrumbItem>
<BreadcrumbItem isCurrentPage>
<Input
size="sm"
value={name}
onChange={(e) => setName(e.target.value)}
onBlur={onSaveName}
borderWidth={1}
borderColor="transparent"
fontSize={16}
px={0}
minW={{ base: 100, lg: 300 }}
flex={1}
_hover={{ borderColor: "gray.300" }}
_focus={{ borderColor: "blue.500", outline: "none" }}
/>
</BreadcrumbItem>
</Breadcrumb>
<DatasetHeaderButtons />
</PageHeaderContainer>
<Box w="full" overflowX="auto" flex={1} px={8} pt={8} pb={16}>
{datasetId && <DatasetEntriesTable />}
</Box>
</VStack>
</AppShell>
);
}

View File

@@ -0,0 +1,49 @@
import { SimpleGrid, Icon, Breadcrumb, BreadcrumbItem, Flex } from "@chakra-ui/react";
import AppShell from "~/components/nav/AppShell";
import { RiDatabase2Line } from "react-icons/ri";
import {
DatasetCard,
DatasetCardSkeleton,
NewDatasetCard,
} from "~/components/datasets/DatasetCard";
import PageHeaderContainer from "~/components/nav/PageHeaderContainer";
import ProjectBreadcrumbContents from "~/components/nav/ProjectBreadcrumbContents";
import { useDatasets } from "~/utils/hooks";
export default function DatasetsPage() {
const datasets = useDatasets();
return (
<AppShell title="Data" requireAuth>
<PageHeaderContainer>
<Breadcrumb>
<BreadcrumbItem>
<ProjectBreadcrumbContents />
</BreadcrumbItem>
<BreadcrumbItem minH={8}>
<Flex alignItems="center">
<Icon as={RiDatabase2Line} boxSize={4} mr={2} /> Datasets
</Flex>
</BreadcrumbItem>
</Breadcrumb>
</PageHeaderContainer>
<SimpleGrid w="full" columns={{ base: 1, md: 2, lg: 3, xl: 4 }} spacing={8} py={4} px={8}>
<NewDatasetCard />
{datasets.data && !datasets.isLoading ? (
datasets?.data?.map((dataset) => (
<DatasetCard
key={dataset.id}
dataset={{ ...dataset, numEntries: dataset._count.datasetEntries }}
/>
))
) : (
<>
<DatasetCardSkeleton />
<DatasetCardSkeleton />
<DatasetCardSkeleton />
</>
)}
</SimpleGrid>
</AppShell>
);
}

View File

@@ -124,7 +124,7 @@ export default function Experiment() {
<ExperimentHeaderButtons />
</PageHeaderContainer>
<ExperimentSettingsDrawer />
<Box w="100%" overflowX="auto" flex={1} id="output-container">
<Box w="100%" overflowX="auto" flex={1}>
<OutputsTable experimentId={experiment.data?.id} />
</Box>
</VStack>

View File

@@ -1,18 +0,0 @@
import { Text, VStack, Divider } from "@chakra-ui/react";
import FineTunesTable from "~/components/fineTunes/FineTunesTable";
import AppShell from "~/components/nav/AppShell";
export default function FineTunes() {
return (
<AppShell title="Fine Tunes" requireAuth requireBeta>
<VStack px={8} py={8} alignItems="flex-start" spacing={4} w="full">
<Text fontSize="2xl" fontWeight="bold">
Fine Tunes
</Text>
<Divider />
<FineTunesTable />
</VStack>
</AppShell>
);
}

View File

@@ -1,5 +1,5 @@
import { useState } from "react";
import { Text, VStack, Divider, HStack, Box } from "@chakra-ui/react";
import { Text, VStack, Divider, HStack } from "@chakra-ui/react";
import AppShell from "~/components/nav/AppShell";
import LoggedCallTable from "~/components/requestLogs/LoggedCallsTable";
@@ -9,9 +9,6 @@ import { useAppStore } from "~/state/store";
import { RiFlaskLine } from "react-icons/ri";
import { FiFilter } from "react-icons/fi";
import LogFilters from "~/components/requestLogs/LogFilters/LogFilters";
import ColumnVisiblityDropdown from "~/components/requestLogs/ColumnVisiblityDropdown";
import FineTuneButton from "~/components/requestLogs/FineTuneButton";
import ExportButton from "~/components/requestLogs/ExportButton";
export default function LoggedCalls() {
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
@@ -20,25 +17,12 @@ export default function LoggedCalls() {
return (
<AppShell title="Request Logs" requireAuth>
<Box h="100vh" overflowY="scroll">
<VStack px={8} py={8} alignItems="flex-start" spacing={4} w="full">
<Text fontSize="2xl" fontWeight="bold">
Request Logs
</Text>
<Divider />
<HStack w="full" justifyContent="flex-end">
<FineTuneButton />
<ActionButton
onClick={() => {
console.log("experimenting with these ids", selectedLogIds);
}}
label="Experiment"
icon={RiFlaskLine}
isDisabled={selectedLogIds.size === 0}
requireBeta
/>
<ExportButton />
<ColumnVisiblityDropdown />
<ActionButton
onClick={() => {
setFiltersShown(!filtersShown);
@@ -46,12 +30,19 @@ export default function LoggedCalls() {
label={filtersShown ? "Hide Filters" : "Show Filters"}
icon={FiFilter}
/>
<ActionButton
onClick={() => {
console.log("experimenting with these ids", selectedLogIds);
}}
label="Experiment"
icon={RiFlaskLine}
isDisabled={selectedLogIds.size === 0}
/>
</HStack>
{filtersShown && <LogFilters />}
<LoggedCallTable />
<LoggedCallsPaginator />
</VStack>
</Box>
</AppShell>
);
}

View File

@@ -0,0 +1,108 @@
import { type ChatCompletion } from "openai/resources/chat";
import { openai } from "../../utils/openai";
import { isAxiosError } from "./utils";
import { type APIResponse } from "openai/core";
import { sleep } from "~/server/utils/sleep";
const MAX_AUTO_RETRIES = 50;
const MIN_DELAY = 500; // milliseconds
const MAX_DELAY = 15000; // milliseconds
function calculateDelay(numPreviousTries: number): number {
const baseDelay = Math.min(MAX_DELAY, MIN_DELAY * Math.pow(2, numPreviousTries));
const jitter = Math.random() * baseDelay;
return baseDelay + jitter;
}
const getCompletionWithBackoff = async (
getCompletion: () => Promise<APIResponse<ChatCompletion>>,
) => {
let completion;
let tries = 0;
while (tries < MAX_AUTO_RETRIES) {
try {
completion = await getCompletion();
break;
} catch (e) {
if (isAxiosError(e)) {
console.error(e?.response?.data?.error?.message);
} else {
await sleep(calculateDelay(tries));
console.error(e);
}
}
tries++;
}
return completion;
};
// TODO: Add seeds to ensure batches don't contain duplicate data
const MAX_BATCH_SIZE = 5;
export const autogenerateDatasetEntries = async (
numToGenerate: number,
inputDescription: string,
outputDescription: string,
): Promise<{ input: string; output: string }[]> => {
const batchSizes = Array.from({ length: Math.ceil(numToGenerate / MAX_BATCH_SIZE) }, (_, i) =>
i === Math.ceil(numToGenerate / MAX_BATCH_SIZE) - 1 && numToGenerate % MAX_BATCH_SIZE
? numToGenerate % MAX_BATCH_SIZE
: MAX_BATCH_SIZE,
);
const getCompletion = (batchSize: number) =>
openai.chat.completions.create({
model: "gpt-4",
messages: [
{
role: "system",
content: `The user needs ${batchSize} rows of data, each with an input and an output.\n---\n The input should follow these requirements: ${inputDescription}\n---\n The output should follow these requirements: ${outputDescription}`,
},
],
functions: [
{
name: "add_list_of_data",
description: "Add a list of data to the database",
parameters: {
type: "object",
properties: {
rows: {
type: "array",
description: "The rows of data that match the description",
items: {
type: "object",
properties: {
input: {
type: "string",
description: "The input for this row",
},
output: {
type: "string",
description: "The output for this row",
},
},
},
},
},
},
},
],
function_call: { name: "add_list_of_data" },
temperature: 0.5,
});
const completionCallbacks = batchSizes.map((batchSize) =>
getCompletionWithBackoff(() => getCompletion(batchSize)),
);
const completions = await Promise.all(completionCallbacks);
const rows = completions.flatMap((completion) => {
const parsed = JSON.parse(
completion?.choices[0]?.message?.function_call?.arguments ?? '{"rows": []}',
) as { rows: { input: string; output: string }[] };
return parsed.rows;
});
return rows;
};
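A minimal standalone sketch of the two pieces of arithmetic above — capped exponential backoff with jitter, and last-batch sizing — using the same constants; nothing here calls the OpenAI client:
const MIN_DELAY = 500; // milliseconds
const MAX_DELAY = 15000; // milliseconds
const MAX_BATCH_SIZE = 5;
// Delay doubles per retry, capped at MAX_DELAY, plus up to 100% random jitter.
function calculateDelay(numPreviousTries: number): number {
  const baseDelay = Math.min(MAX_DELAY, MIN_DELAY * Math.pow(2, numPreviousTries));
  return baseDelay + Math.random() * baseDelay;
}
// Full batches of MAX_BATCH_SIZE, with the remainder (if any) as the final batch.
function batchSizes(numToGenerate: number): number[] {
  const numBatches = Math.ceil(numToGenerate / MAX_BATCH_SIZE);
  return Array.from({ length: numBatches }, (_, i) =>
    i === numBatches - 1 && numToGenerate % MAX_BATCH_SIZE
      ? numToGenerate % MAX_BATCH_SIZE
      : MAX_BATCH_SIZE,
  );
}
console.log(batchSizes(12)); // [5, 5, 2]
console.log(Math.round(calculateDelay(3))); // somewhere between 4000 and 8000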

View File

@@ -98,11 +98,6 @@ export const autogenerateScenarioValues = async (
function_call: { name: "add_scenario" },
temperature: 0.5,
openpipe: {
tags: {
prompt_id: "autogenerateScenarioValues",
},
},
});
const parsed = JSON.parse(

View File

@@ -66,7 +66,7 @@ export const v1ApiRouter = createOpenApiRouter({
if (!existingResponse) return { respPayload: null };
const newCall = await prisma.loggedCall.create({
await prisma.loggedCall.create({
data: {
projectId: ctx.key.projectId,
requestedAt: new Date(input.requestedAt),
@@ -75,7 +75,11 @@ export const v1ApiRouter = createOpenApiRouter({
},
});
await createTags(newCall.projectId, newCall.id, input.tags);
await createTags(
existingResponse.originalLoggedCall.projectId,
existingResponse.originalLoggedCallId,
input.tags,
);
return {
respPayload: existingResponse.respPayload,
};
@@ -107,7 +111,7 @@ export const v1ApiRouter = createOpenApiRouter({
.default({}),
}),
)
.output(z.object({ status: z.union([z.literal("ok"), z.literal("error")]) }))
.output(z.object({ status: z.literal("ok") }))
.mutation(async ({ input, ctx }) => {
const reqPayload = await reqValidator.spa(input.reqPayload);
const respPayload = await respValidator.spa(input.respPayload);
@@ -208,7 +212,6 @@ export const v1ApiRouter = createOpenApiRouter({
createdAt: true,
cacheHit: true,
tags: true,
id: true,
modelResponse: {
select: {
id: true,
@@ -234,7 +237,7 @@ async function createTags(projectId: string, loggedCallId: string, tags: Record<
const tagsToCreate = Object.entries(tags).map(([name, value]) => ({
projectId,
loggedCallId,
name: name.replaceAll(/[^a-zA-Z0-9_$.]/g, "_"),
name: name.replaceAll(/[^a-zA-Z0-9_$]/g, "_"),
value,
}));
await prisma.loggedCallTag.createMany({
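A side-by-side check of the two tag-name sanitizers in the hunk above — hypothetical helpers, not exports from the router; the regexes differ only in whether "." is allowed to pass through:
const sanitizeKeepingDots = (name: string) => name.replaceAll(/[^a-zA-Z0-9_$.]/g, "_");
const sanitizeStrippingDots = (name: string) => name.replaceAll(/[^a-zA-Z0-9_$]/g, "_");
console.log(sanitizeKeepingDots("prompt.id v2"));   // "prompt.id_v2"
console.log(sanitizeStrippingDots("prompt.id v2")); // "prompt_id_v2"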

View File

@@ -6,10 +6,11 @@ import { scenarioVariantCellsRouter } from "./routers/scenarioVariantCells.route
import { scenarioVarsRouter } from "./routers/scenarioVariables.router";
import { evaluationsRouter } from "./routers/evaluations.router";
import { worldChampsRouter } from "./routers/worldChamps.router";
import { datasetsRouter } from "./routers/datasets.router";
import { datasetEntries } from "./routers/datasetEntries.router";
import { projectsRouter } from "./routers/projects.router";
import { dashboardRouter } from "./routers/dashboard.router";
import { loggedCallsRouter } from "./routers/loggedCalls.router";
import { fineTunesRouter } from "./routers/fineTunes.router";
import { usersRouter } from "./routers/users.router";
import { adminJobsRouter } from "./routers/adminJobs.router";
@@ -26,10 +27,11 @@ export const appRouter = createTRPCRouter({
scenarioVars: scenarioVarsRouter,
evaluations: evaluationsRouter,
worldChamps: worldChampsRouter,
datasets: datasetsRouter,
datasetEntries: datasetEntries,
projects: projectsRouter,
dashboard: dashboardRouter,
loggedCalls: loggedCallsRouter,
fineTunes: fineTunesRouter,
users: usersRouter,
adminJobs: adminJobsRouter,
});

View File

@@ -0,0 +1,145 @@
import { z } from "zod";
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { prisma } from "~/server/db";
import { requireCanModifyDataset, requireCanViewDataset } from "~/utils/accessControl";
import { autogenerateDatasetEntries } from "../autogenerate/autogenerateDatasetEntries";
export const datasetEntries = createTRPCRouter({
list: protectedProcedure
.input(z.object({ datasetId: z.string(), page: z.number(), pageSize: z.number() }))
.query(async ({ input, ctx }) => {
await requireCanViewDataset(input.datasetId, ctx);
const { datasetId, page, pageSize } = input;
const entries = await prisma.datasetEntry.findMany({
where: {
datasetId,
},
orderBy: { createdAt: "desc" },
skip: (page - 1) * pageSize,
take: pageSize,
});
const count = await prisma.datasetEntry.count({
where: {
datasetId,
},
});
return {
entries,
count,
};
}),
createOne: protectedProcedure
.input(
z.object({
datasetId: z.string(),
input: z.string(),
output: z.string().optional(),
}),
)
.mutation(async ({ input, ctx }) => {
await requireCanModifyDataset(input.datasetId, ctx);
return await prisma.datasetEntry.create({
data: {
datasetId: input.datasetId,
input: input.input,
output: input.output,
},
});
}),
autogenerateEntries: protectedProcedure
.input(
z.object({
datasetId: z.string(),
numToGenerate: z.number(),
inputDescription: z.string(),
outputDescription: z.string(),
}),
)
.mutation(async ({ input, ctx }) => {
await requireCanModifyDataset(input.datasetId, ctx);
const dataset = await prisma.dataset.findUnique({
where: {
id: input.datasetId,
},
});
if (!dataset) {
throw new Error(`Dataset with id ${input.datasetId} does not exist`);
}
const entries = await autogenerateDatasetEntries(
input.numToGenerate,
input.inputDescription,
input.outputDescription,
);
const createdEntries = await prisma.datasetEntry.createMany({
data: entries.map((entry) => ({
datasetId: input.datasetId,
input: entry.input,
output: entry.output,
})),
});
return createdEntries;
}),
delete: protectedProcedure
.input(z.object({ id: z.string() }))
.mutation(async ({ input, ctx }) => {
const datasetId = (
await prisma.datasetEntry.findUniqueOrThrow({
where: { id: input.id },
})
).datasetId;
await requireCanModifyDataset(datasetId, ctx);
return await prisma.datasetEntry.delete({
where: {
id: input.id,
},
});
}),
update: protectedProcedure
.input(
z.object({
id: z.string(),
updates: z.object({
input: z.string(),
output: z.string().optional(),
}),
}),
)
.mutation(async ({ input, ctx }) => {
const existing = await prisma.datasetEntry.findUnique({
where: {
id: input.id,
},
});
if (!existing) {
throw new Error(`dataEntry with id ${input.id} does not exist`);
}
await requireCanModifyDataset(existing.datasetId, ctx);
return await prisma.datasetEntry.update({
where: {
id: input.id,
},
data: {
input: input.updates.input,
output: input.updates.output,
},
});
}),
});

View File

@@ -0,0 +1,88 @@
import { z } from "zod";
import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
import { prisma } from "~/server/db";
import {
requireCanModifyDataset,
requireCanModifyProject,
requireCanViewDataset,
requireCanViewProject,
} from "~/utils/accessControl";
export const datasetsRouter = createTRPCRouter({
list: protectedProcedure
.input(z.object({ projectId: z.string() }))
.query(async ({ input, ctx }) => {
await requireCanViewProject(input.projectId, ctx);
const datasets = await prisma.dataset.findMany({
where: {
projectId: input.projectId,
},
orderBy: {
createdAt: "desc",
},
include: {
_count: {
select: { datasetEntries: true },
},
},
});
return datasets;
}),
get: publicProcedure.input(z.object({ id: z.string() })).query(async ({ input, ctx }) => {
await requireCanViewDataset(input.id, ctx);
return await prisma.dataset.findFirstOrThrow({
where: { id: input.id },
include: {
project: true,
},
});
}),
create: protectedProcedure
.input(z.object({ projectId: z.string() }))
.mutation(async ({ input, ctx }) => {
await requireCanModifyProject(input.projectId, ctx);
const numDatasets = await prisma.dataset.count({
where: {
projectId: input.projectId,
},
});
return await prisma.dataset.create({
data: {
name: `Dataset ${numDatasets + 1}`,
projectId: input.projectId,
},
});
}),
update: protectedProcedure
.input(z.object({ id: z.string(), updates: z.object({ name: z.string() }) }))
.mutation(async ({ input, ctx }) => {
await requireCanModifyDataset(input.id, ctx);
return await prisma.dataset.update({
where: {
id: input.id,
},
data: {
name: input.updates.name,
},
});
}),
delete: protectedProcedure
.input(z.object({ id: z.string() }))
.mutation(async ({ input, ctx }) => {
await requireCanModifyDataset(input.id, ctx);
await prisma.dataset.delete({
where: {
id: input.id,
},
});
}),
});

View File

@@ -178,7 +178,6 @@ export const experimentsRouter = createTRPCRouter({
existingToNewVariantIds.set(variant.id, newVariantId);
variantsToCreate.push({
...variant,
uiId: uuidv4(),
id: newVariantId,
experimentId: newExperimentId,
});
@@ -192,7 +191,6 @@ export const experimentsRouter = createTRPCRouter({
scenariosToCreate.push({
...scenario,
id: newScenarioId,
uiId: uuidv4(),
experimentId: newExperimentId,
variableValues: scenario.variableValues as Prisma.InputJsonValue,
});

View File

@@ -1,113 +0,0 @@
import { z } from "zod";
import { v4 as uuidv4 } from "uuid";
import { type Prisma } from "@prisma/client";
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { prisma } from "~/server/db";
import { requireCanViewProject, requireCanModifyProject } from "~/utils/accessControl";
import { error, success } from "~/utils/errorHandling/standardResponses";
export const fineTunesRouter = createTRPCRouter({
list: protectedProcedure
.input(
z.object({
projectId: z.string(),
page: z.number(),
pageSize: z.number(),
}),
)
.query(async ({ input, ctx }) => {
const { projectId, page, pageSize } = input;
await requireCanViewProject(projectId, ctx);
const fineTunes = await prisma.fineTune.findMany({
where: {
projectId,
},
include: {
dataset: {
include: {
_count: {
select: {
datasetEntries: true,
},
},
},
},
},
orderBy: { createdAt: "asc" },
skip: (page - 1) * pageSize,
take: pageSize,
});
const count = await prisma.fineTune.count({
where: {
projectId,
},
});
return {
fineTunes,
count,
};
}),
create: protectedProcedure
.input(
z.object({
projectId: z.string(),
selectedLogIds: z.array(z.string()),
slug: z.string(),
baseModel: z.string(),
}),
)
.mutation(async ({ input, ctx }) => {
await requireCanModifyProject(input.projectId, ctx);
const existingFineTune = await prisma.fineTune.findFirst({
where: {
slug: input.slug,
},
});
if (existingFineTune) {
return error("A fine tune with that slug already exists");
}
const newDatasetId = uuidv4();
const datasetEntriesToCreate: Prisma.DatasetEntryCreateManyDatasetInput[] =
input.selectedLogIds.map((loggedCallId) => ({
loggedCallId,
}));
await prisma.$transaction([
prisma.dataset.create({
data: {
id: newDatasetId,
name: input.slug,
project: {
connect: {
id: input.projectId,
},
},
datasetEntries: {
createMany: {
data: datasetEntriesToCreate,
},
},
},
}),
prisma.fineTune.create({
data: {
projectId: input.projectId,
slug: input.slug,
baseModel: input.baseModel,
datasetId: newDatasetId,
},
}),
]);
return success();
}),
});

View File

@@ -1,16 +1,11 @@
import { z } from "zod";
import { type Expression, type SqlBool, sql, type RawBuilder } from "kysely";
import { jsonArrayFrom } from "kysely/helpers/postgres";
import archiver from "archiver";
import { WritableStreamBuffer } from "stream-buffers";
import { type JsonValue } from "type-fest";
import { shuffle } from "lodash-es";
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { kysely, prisma } from "~/server/db";
import { comparators, defaultFilterableFields } from "~/state/logFiltersSlice";
import { requireCanViewProject } from "~/utils/accessControl";
import hashObject from "~/server/utils/hashObject";
// create comparator type based off of comparators
const comparatorToSqlExpression = (comparator: (typeof comparators)[number], value: string) => {
@@ -185,102 +180,4 @@ export const loggedCallsRouter = createTRPCRouter({
return tags.map((tag) => tag.name);
}),
export: protectedProcedure
.input(
z.object({
projectId: z.string(),
selectedLogIds: z.string().array(),
testingSplit: z.number(),
selectedExportFormat: z.string(),
removeDuplicates: z.boolean(),
}),
)
.mutation(async ({ input, ctx }) => {
await requireCanViewProject(input.projectId, ctx);
// Fetch the real data using Prisma
const loggedCallsFromDb = await ctx.prisma.loggedCallModelResponse.findMany({
where: {
originalLoggedCall: {
projectId: input.projectId,
id: { in: input.selectedLogIds },
},
statusCode: 200,
},
});
// Convert the database data into the desired format
let formattedLoggedCalls: { instruction: JsonValue[]; output: JsonValue }[] =
loggedCallsFromDb.map((call) => ({
instruction: (call.reqPayload as unknown as Record<string, unknown>)
.messages as JsonValue[],
output: (call.respPayload as unknown as { choices: { message: unknown }[] }).choices[0]
?.message as JsonValue,
}));
if (input.removeDuplicates) {
const deduplicatedLoggedCalls = [];
const loggedCallHashSet = new Set<string>();
for (const loggedCall of formattedLoggedCalls) {
const loggedCallHash = hashObject(loggedCall);
if (!loggedCallHashSet.has(loggedCallHash)) {
loggedCallHashSet.add(loggedCallHash);
deduplicatedLoggedCalls.push(loggedCall);
}
}
formattedLoggedCalls = deduplicatedLoggedCalls;
}
// Remove duplicate messages from instructions
const instructionMessageHashMap = new Map<string, number>();
for (const loggedCall of formattedLoggedCalls) {
for (const message of loggedCall.instruction) {
const hash = hashObject(message);
if (instructionMessageHashMap.has(hash)) {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
instructionMessageHashMap.set(hash, instructionMessageHashMap.get(hash)! + 1);
} else {
instructionMessageHashMap.set(hash, 0);
}
}
}
for (const loggedCall of formattedLoggedCalls) {
loggedCall.instruction = loggedCall.instruction.filter((message) => {
const hash = hashObject(message);
// If the same message appears in a single instruction multiple times, there is some danger of
// it being removed from all logged calls. This is enough of an edge case that we don't
// need to worry about it for now.
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
return instructionMessageHashMap.get(hash)! < formattedLoggedCalls.length;
});
}
// Stringify instructions and outputs
const stringifiedLoggedCalls = shuffle(formattedLoggedCalls).map((loggedCall) => ({
instruction: JSON.stringify(loggedCall.instruction),
output: JSON.stringify(loggedCall.output),
}));
const splitIndex = Math.floor((stringifiedLoggedCalls.length * input.testingSplit) / 100);
const testingData = stringifiedLoggedCalls.slice(0, splitIndex);
const trainingData = stringifiedLoggedCalls.slice(splitIndex);
// Convert arrays to JSONL format
const trainingDataJSONL = trainingData.map((item) => JSON.stringify(item)).join("\n");
const testingDataJSONL = testingData.map((item) => JSON.stringify(item)).join("\n");
const output = new WritableStreamBuffer();
const archive = archiver("zip");
archive.pipe(output);
archive.append(trainingDataJSONL, { name: "train.jsonl" });
archive.append(testingDataJSONL, { name: "test.jsonl" });
await archive.finalize();
// Convert buffer to base64
const base64 = output.getContents().toString("base64");
return base64;
}),
});
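The removed export mutation deduplicates logged calls by hashing each formatted call, then drops instruction messages that recur across every call (for example a shared system prompt) before splitting into train/test JSONL. The following is a standalone sketch of that intent; it uses a plain SHA-256 JSON hash in place of ~/server/utils/hashObject and implements "drop messages that appear in every call" directly rather than mirroring the original counter exactly.

// Standalone sketch of the hash-based deduplication used by the export mutation above.
import { createHash } from "crypto";

type FormattedCall = { instruction: unknown[]; output: unknown };

const hashObject = (obj: unknown) =>
  createHash("sha256").update(JSON.stringify(obj)).digest("hex");

export function dedupeForExport(calls: FormattedCall[]): FormattedCall[] {
  // 1. Drop fully duplicated logged calls.
  const seen = new Set<string>();
  const unique = calls.filter((call) => {
    const h = hashObject(call);
    if (seen.has(h)) return false;
    seen.add(h);
    return true;
  });

  // 2. Count how many times each instruction message occurs across all calls.
  const counts = new Map<string, number>();
  for (const call of unique) {
    for (const message of call.instruction) {
      const h = hashObject(message);
      counts.set(h, (counts.get(h) ?? 0) + 1);
    }
  }

  // 3. Drop messages that occur in every call (e.g. a shared system prompt),
  //    since they add no signal to the fine-tuning data.
  return unique.map((call) => ({
    ...call,
    instruction: call.instruction.filter(
      (message) => (counts.get(hashObject(message)) ?? 0) < unique.length,
    ),
  }));
}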

View File

@@ -196,10 +196,7 @@ export const promptVariantsRouter = createTRPCRouter({
? `${originalVariant?.label} Copy`
: `Prompt Variant ${largestSortIndex + 2}`;
const newConstructFn = await deriveNewConstructFn(
originalVariant,
originalVariant?.promptConstructor,
);
const newConstructFn = await deriveNewConstructFn(originalVariant);
const createNewVariantAction = prisma.promptVariant.create({
data: {
@@ -301,7 +298,6 @@ export const promptVariantsRouter = createTRPCRouter({
.input(
z.object({
id: z.string(),
originalPromptFn: z.string(),
instructions: z.string().optional(),
newModel: z
.object({
@@ -319,21 +315,22 @@ export const promptVariantsRouter = createTRPCRouter({
});
await requireCanModifyExperiment(existing.experimentId, ctx);
const constructedPrompt = await parsePromptConstructor(existing.promptConstructor);
if ("error" in constructedPrompt) {
return error(constructedPrompt.error);
}
const model = input.newModel
? modelProviders[input.newModel.provider].models[input.newModel.model]
: undefined;
const promptConstructionFn = await deriveNewConstructFn(
existing,
input.originalPromptFn,
model,
input.instructions,
);
const promptConstructionFn = await deriveNewConstructFn(existing, model, input.instructions);
// TODO: Validate promptConstructionFn
// TODO: Record in some sort of history
return success(promptConstructionFn);
return promptConstructionFn;
}),
replaceVariant: protectedProcedure

View File

@@ -61,7 +61,7 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
evalsComplete,
};
}),
hardRefetch: protectedProcedure
forceRefetch: protectedProcedure
.input(
z.object({
scenarioId: z.string(),
@@ -85,10 +85,7 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
});
if (!cell) {
await generateNewCell(input.variantId, input.scenarioId, {
stream: true,
hardRefetch: true,
});
await generateNewCell(input.variantId, input.scenarioId, { stream: true });
return;
}
@@ -99,7 +96,7 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
},
});
await queueQueryModel(cell.id, { stream: true, hardRefetch: true });
await queueQueryModel(cell.id, true);
}),
getTemplatedPromptMessage: publicProcedure
.input(

View File

@@ -0,0 +1,19 @@
import "dotenv/config";
import { openai } from "../utils/openai";
const resp = await openai.chat.completions.create({
model: "gpt-3.5-turbo-0613",
stream: true,
messages: [
{
role: "user",
content: "count to 20",
},
],
});
for await (const part of resp) {
console.log("part", part);
}
console.log("final resp", resp);

View File

@@ -1,4 +1,4 @@
import { type Helpers, type Task, makeWorkerUtils, TaskSpec } from "graphile-worker";
import { type Helpers, type Task, makeWorkerUtils } from "graphile-worker";
import { env } from "~/env.mjs";
let workerUtilsPromise: ReturnType<typeof makeWorkerUtils> | null = null;
@@ -16,11 +16,9 @@ function defineTask<TPayload>(
taskIdentifier: string,
taskHandler: (payload: TPayload, helpers: Helpers) => Promise<void>,
) {
const enqueue = async (payload: TPayload, spec?: TaskSpec) => {
const enqueue = async (payload: TPayload, runAt?: Date) => {
console.log("Enqueuing task", taskIdentifier, payload);
const utils = await workerUtils();
return await utils.addJob(taskIdentifier, payload, spec);
await (await workerUtils()).addJob(taskIdentifier, payload, { runAt });
};
const handler = (payload: TPayload, helpers: Helpers) => {
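defineTask pairs a graphile-worker task handler with a typed enqueue helper. A hedged sketch of declaring and enqueuing a task this way is below; the task name and payload are hypothetical, and the second enqueue argument is either a full TaskSpec or a bare runAt Date depending on which signature of defineTask is in play in this compare.

// Illustrative task built on the defineTask helper above (name and payload are hypothetical).
import defineTask from "./defineTask";

type SendDigestJob = { userId: string };

export const sendDigest = defineTask<SendDigestJob>("sendDigest", async (payload, helpers) => {
  helpers.logger.info(`sending digest to ${payload.userId}`);
  // ...do the actual work here...
});

// Callers get a typed enqueue function. With the TaskSpec-based signature, extra
// options such as jobKey or priority pass straight through to addJob; with the
// runAt-based signature the second argument is just a Date.
await sendDigest.enqueue({ userId: "user_123" }, { runAt: new Date(Date.now() + 60_000) });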

View File

@@ -25,6 +25,7 @@ function calculateDelay(numPreviousTries: number): number {
}
export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) => {
console.log("RUNNING TASK", task);
const { cellId, stream, numPreviousTries } = task;
const cell = await prisma.scenarioVariantCell.findUnique({
where: { id: cellId },
@@ -152,7 +153,7 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
stream,
numPreviousTries: numPreviousTries + 1,
},
{ runAt: retryTime, jobKey: cellId, priority: 3 },
retryTime,
);
await prisma.scenarioVariantCell.update({
where: { id: cellId },
@@ -171,13 +172,7 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
}
});
export const queueQueryModel = async (
cellId: string,
options: { stream?: boolean; hardRefetch?: boolean } = {},
) => {
// Hard refetches are higher priority than streamed queries, which are higher priority than non-streamed queries.
const jobPriority = options.hardRefetch ? 0 : options.stream ? 1 : 2;
export const queueQueryModel = async (cellId: string, stream: boolean) => {
await Promise.all([
prisma.scenarioVariantCell.update({
where: {
@@ -189,13 +184,6 @@ export const queueQueryModel = async (
jobQueuedAt: new Date(),
},
}),
queryModel.enqueue(
{ cellId, stream: options.stream ?? false, numPreviousTries: 0 },
// Streamed queries are higher priority than non-streamed queries. Lower
// numbers are higher priority in graphile-worker.
{ jobKey: cellId, priority: jobPriority },
),
queryModel.enqueue({ cellId, stream, numPreviousTries: 0 }),
]);
};
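In graphile-worker, lower priority numbers run sooner and a jobKey deduplicates or replaces any pending job with the same key. A minimal sketch of the scheme the removed code used (0 for hard refetches, 1 for streamed queries, 2 for non-streamed, with the cell id as the job key), written directly against worker utils:

// Sketch of the priority scheme described in the comments above. Lower numbers run
// first; using the cell id as jobKey means re-enqueuing a cell replaces its pending
// job instead of queuing a duplicate.
import { makeWorkerUtils } from "graphile-worker";

const enqueueQueryModel = async (
  cellId: string,
  options: { stream?: boolean; hardRefetch?: boolean } = {},
) => {
  const priority = options.hardRefetch ? 0 : options.stream ? 1 : 2;
  const utils = await makeWorkerUtils({ connectionString: process.env.DATABASE_URL });
  await utils.addJob(
    "queryModel",
    { cellId, stream: options.stream ?? false, numPreviousTries: 0 },
    { jobKey: cellId, priority },
  );
  await utils.release();
};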

View File

@@ -13,6 +13,5 @@ export const runNewEval = defineTask<RunNewEvalJob>("runNewEval", async (task) =
});
export const queueRunNewEval = async (experimentId: string) => {
// Evals are lower priority than completions
await runNewEval.enqueue({ experimentId }, { priority: 4 });
await runNewEval.enqueue({ experimentId });
};

View File

@@ -1,47 +0,0 @@
import "dotenv/config";
import defineTask from "./defineTask";
import { type TaskList, run } from "graphile-worker";
import { env } from "~/env.mjs";
import "../../../sentry.server.config";
export type TestTask = { i: number };
// When a new eval is created, we want to run it on all existing outputs, but return the new eval first
export const testTask = defineTask<TestTask>("testTask", (task) => {
console.log("ran task ", task.i);
void new Promise((_resolve, reject) => setTimeout(reject, 500));
return Promise.resolve();
});
const registeredTasks = [testTask];
const taskList = registeredTasks.reduce((acc, task) => {
acc[task.task.identifier] = task.task.handler;
return acc;
}, {} as TaskList);
// process.on("unhandledRejection", (reason, promise) => {
// console.log("Unhandled Rejection at:", reason?.stack || reason);
// });
// Run a worker to execute jobs:
const runner = await run({
connectionString: env.DATABASE_URL,
concurrency: 10,
// Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
noHandleSignals: false,
pollInterval: 1000,
taskList,
});
console.log("Worker successfully started");
for (let i = 0; i < 10; i++) {
await testTask.enqueue({ i });
await new Promise((resolve) => setTimeout(resolve, 1000));
}
await runner.promise;

View File

@@ -1,6 +1,5 @@
import { type TaskList, run } from "graphile-worker";
import "dotenv/config";
import "../../../sentry.server.config";
import { env } from "~/env.mjs";
import { queryModel } from "./queryModel.task";
@@ -18,8 +17,7 @@ const taskList = registeredTasks.reduce((acc, task) => {
// Run a worker to execute jobs:
const runner = await run({
connectionString: env.DATABASE_URL,
concurrency: env.WORKER_CONCURRENCY,
maxPoolSize: env.WORKER_MAX_POOL_SIZE,
concurrency: 10,
// Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
noHandleSignals: false,
pollInterval: 1000,

View File

@@ -12,43 +12,36 @@ const isolate = new ivm.Isolate({ memoryLimit: 128 });
export async function deriveNewConstructFn(
originalVariant: PromptVariant | null,
originalPromptFn?: string,
newModel?: Model,
instructions?: string,
) {
if (originalPromptFn && !newModel && !instructions) {
return originalPromptFn;
if (originalVariant && !newModel && !instructions) {
return originalVariant.promptConstructor;
}
if (originalVariant && originalPromptFn && (newModel || instructions)) {
return await requestUpdatedPromptFunction(
originalVariant,
originalPromptFn,
newModel,
instructions,
);
if (originalVariant && (newModel || instructions)) {
return await requestUpdatedPromptFunction(originalVariant, newModel, instructions);
}
return dedent`
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo-0613",
prompt = {
model: "gpt-3.5-turbo",
messages: [
{
role: "system",
content: \`Hello, world!\`,
},
],
});`;
content: "Return 'Hello, world!'",
}
]
}`;
}
const NUM_RETRIES = 5;
const requestUpdatedPromptFunction = async (
originalVariant: PromptVariant,
originalPromptFn: string,
newModel?: Model,
instructions?: string,
) => {
const originalModelProvider = modelProviders[originalVariant.modelProvider as SupportedProvider];
const originalModel = originalModelProvider.models[originalVariant.model] as Model;
let newConstructionFn = "";
let newContructionFn = "";
for (let i = 0; i < NUM_RETRIES; i++) {
try {
const messages: CreateChatCompletionRequestMessage[] = [
@@ -62,7 +55,7 @@ const requestUpdatedPromptFunction = async (
},
{
role: "user",
content: `This is the current prompt constructor function:\n---\n${originalPromptFn}`,
content: `This is the current prompt constructor function:\n---\n${originalVariant.promptConstructor}`,
},
];
if (newModel) {
@@ -116,12 +109,6 @@ const requestUpdatedPromptFunction = async (
function_call: {
name: "update_prompt_constructor_function",
},
openpipe: {
tags: {
prompt_id: "deriveNewConstructFn",
model_translation: (!!newModel).toString(),
},
},
});
const argString = completion.choices[0]?.message?.function_call?.arguments || "{}";
@@ -144,7 +131,7 @@ const requestUpdatedPromptFunction = async (
const args = await contructPromptFunctionArgs.copy(); // Get the actual value from the isolate
if (args && isObject(args) && "new_prompt_function" in args) {
newConstructionFn = await formatPromptConstructor(args.new_prompt_function as string);
newContructionFn = await formatPromptConstructor(args.new_prompt_function as string);
break;
}
} catch (e) {
@@ -152,5 +139,5 @@ const requestUpdatedPromptFunction = async (
}
}
return newConstructionFn;
return newContructionFn;
};
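requestUpdatedPromptFunction retries the model call until the forced function-call arguments yield a usable new_prompt_function. A stripped-down sketch of that retry-and-validate loop follows; the prompt, model, and schema are placeholders, and it uses plain JSON.parse where the original evaluates the argument string inside an isolated-vm sandbox.

// Stripped-down retry loop: keep requesting a forced function call until its
// arguments contain the field we need, up to NUM_RETRIES attempts.
import OpenAI from "openai";

const openai = new OpenAI();
const NUM_RETRIES = 5;

async function requestWithRetries(currentFn: string, instructions: string): Promise<string | null> {
  for (let i = 0; i < NUM_RETRIES; i++) {
    try {
      const completion = await openai.chat.completions.create({
        model: "gpt-4", // placeholder model
        messages: [
          { role: "user", content: `This is the current prompt constructor function:\n---\n${currentFn}` },
          { role: "user", content: instructions },
        ],
        functions: [
          {
            name: "update_prompt_constructor_function",
            parameters: {
              type: "object",
              properties: { new_prompt_function: { type: "string" } },
              required: ["new_prompt_function"],
            },
          },
        ],
        function_call: { name: "update_prompt_constructor_function" },
      });

      const argString = completion.choices[0]?.message?.function_call?.arguments ?? "{}";
      const args = JSON.parse(argString) as { new_prompt_function?: string };
      if (typeof args.new_prompt_function === "string") return args.new_prompt_function;
    } catch (e) {
      console.error(`attempt ${i + 1} failed`, e);
    }
  }
  return null; // caller falls back to the original constructor
}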

View File

@@ -9,8 +9,10 @@ import parsePromptConstructor from "~/promptConstructor/parse";
export const generateNewCell = async (
variantId: string,
scenarioId: string,
options: { stream?: boolean; hardRefetch?: boolean } = {},
options?: { stream?: boolean },
): Promise<void> => {
const stream = options?.stream ?? false;
const variant = await prisma.promptVariant.findUnique({
where: {
id: variantId,
@@ -119,6 +121,6 @@ export const generateNewCell = async (
}),
);
} else {
await queueQueryModel(cell.id, options);
await queueQueryModel(cell.id, stream);
}
};

View File

@@ -1,6 +1,6 @@
import fs from "fs";
import path from "path";
import OpenAI, { type ClientOptions } from "openpipe/openai";
import OpenAI, { type ClientOptions } from "openpipe/src/openai";
import { env } from "~/env.mjs";
@@ -17,7 +17,13 @@ try {
// Set a dummy key so it doesn't fail at build time
config = {
apiKey: env.OPENAI_API_KEY ?? "dummy-key",
openpipe: {
apiKey: env.OPENPIPE_API_KEY,
baseUrl: "http://localhost:3000/api/v1",
},
};
}
// export const openai = env.OPENPIPE_API_KEY ? new OpenAI.OpenAI(config) : new OriginalOpenAI(config);
export const openai = new OpenAI(config);
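The config above routes the OpenAI SDK through the openpipe wrapper so requests can be reported to the local endpoint. Calls made with this client can attach openpipe tags, as the removed deriveNewConstructFn and runGpt4Eval call sites did. A hedged usage sketch (the import path and tag values are illustrative):

// Sketch of a call through the wrapped client with openpipe tags attached.
import { openai } from "~/server/utils/openai"; // path assumed

const completion = await openai.chat.completions.create({
  model: "gpt-3.5-turbo-0613",
  messages: [{ role: "user", content: "Summarize this ticket in one sentence." }],
  openpipe: {
    tags: {
      prompt_id: "ticketSummary", // free-form tag recorded alongside the logged call
    },
  },
});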

View File

@@ -53,11 +53,6 @@ export const runGpt4Eval = async (
},
},
],
openpipe: {
tags: {
prompt_id: "runOneEval",
},
},
});
try {

View File

@@ -1,37 +0,0 @@
import { type SliceCreator } from "./store";
export const comparators = ["=", "!=", "CONTAINS", "NOT_CONTAINS"] as const;
export const defaultFilterableFields = ["Request", "Response", "Model", "Status Code"] as const;
export enum StaticColumnKeys {
SENT_AT = "sentAt",
MODEL = "model",
DURATION = "duration",
INPUT_TOKENS = "inputTokens",
OUTPUT_TOKENS = "outputTokens",
STATUS_CODE = "statusCode",
}
export type ColumnVisibilitySlice = {
visibleColumns: Set<string>;
toggleColumnVisibility: (columnKey: string) => void;
showAllColumns: (columnKeys: string[]) => void;
};
export const createColumnVisibilitySlice: SliceCreator<ColumnVisibilitySlice> = (set, get) => ({
// initialize with all static columns visible
visibleColumns: new Set(Object.values(StaticColumnKeys)),
toggleColumnVisibility: (columnKey: string) =>
set((state) => {
if (state.columnVisibility.visibleColumns.has(columnKey)) {
state.columnVisibility.visibleColumns.delete(columnKey);
} else {
state.columnVisibility.visibleColumns.add(columnKey);
}
}),
showAllColumns: (columnKeys: string[]) =>
set((state) => {
state.columnVisibility.visibleColumns = new Set(columnKeys);
}),
});
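The removed column visibility slice keeps a Set of visible column keys in the zustand store and exposes toggle/show-all actions. A sketch of how a table header control might consume it (the component is hypothetical; the slice file name keeps the repo's spelling):

// Illustrative consumer of the column visibility slice.
import { useAppStore } from "~/state/store";
import { StaticColumnKeys } from "~/state/columnVisiblitySlice";

export function ColumnToggle({ columnKey }: { columnKey: string }) {
  const visible = useAppStore((s) => s.columnVisibility.visibleColumns.has(columnKey));
  const toggle = useAppStore((s) => s.columnVisibility.toggleColumnVisibility);

  return (
    <label>
      <input type="checkbox" checked={visible} onChange={() => toggle(columnKey)} />
      {columnKey}
    </label>
  );
}

// e.g. <ColumnToggle columnKey={StaticColumnKeys.MODEL} />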

View File

@@ -1,23 +0,0 @@
import { type SliceCreator } from "./store";
export type FeatureFlagsSlice = {
flagsLoaded: boolean;
featureFlags: {
betaAccess: boolean;
};
setFeatureFlags: (flags: string[] | undefined) => void;
};
export const createFeatureFlagsSlice: SliceCreator<FeatureFlagsSlice> = (set) => ({
flagsLoaded: false,
featureFlags: {
betaAccess: false,
},
setFeatureFlags: (flags) =>
set((state) => {
state.featureFlags.featureFlags = {
betaAccess: flags?.includes("betaAccess") ?? false,
};
state.featureFlags.flagsLoaded = true;
}),
});

View File

@@ -1,27 +1,13 @@
import { type PersistOptions } from "zustand/middleware/persist";
import { type State } from "./store";
import SuperJSON from "superjson";
import { merge, pick } from "lodash-es";
import { type PartialDeep } from "type-fest";
export type PersistedState = PartialDeep<State>;
export const stateToPersist = {
selectedProjectId: null as string | null,
};
export const persistOptions: PersistOptions<State, PersistedState> = {
export const persistOptions: PersistOptions<State, typeof stateToPersist> = {
name: "persisted-app-store",
partialize: (state) => ({
selectedProjectId: state.selectedProjectId,
columnVisibility: pick(state.columnVisibility, ["visibleColumns"]),
}),
merge: (saved, state) => merge(state, saved),
storage: {
getItem: (key) => {
const data = localStorage.getItem(key);
return data ? SuperJSON.parse(data) : null;
},
setItem: (key, value) => localStorage.setItem(key, SuperJSON.stringify(value)),
removeItem: (key) => localStorage.removeItem(key),
},
onRehydrateStorage: (state) => {
if (state) state.isRehydrated = true;
},
};
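The removed persist config swaps localStorage's default JSON handling for SuperJSON so that non-JSON values such as the visibleColumns Set survive a round trip, and flips isRehydrated once storage has loaded. A small sketch of why plain JSON is not enough here:

// Why SuperJSON backs the persisted store: native JSON silently drops Set contents.
import SuperJSON from "superjson";

const persisted = { visibleColumns: new Set(["sentAt", "model"]) };

JSON.parse(JSON.stringify(persisted));
// -> { visibleColumns: {} }   (the Set is lost)

const restored = SuperJSON.parse<typeof persisted>(SuperJSON.stringify(persisted));
// -> { visibleColumns: Set(2) { 'sentAt', 'model' } }
console.log(restored.visibleColumns.has("model")); // true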

View File

@@ -1,26 +1,16 @@
import loader, { type Monaco } from "@monaco-editor/loader";
import { type RouterOutputs } from "~/utils/api";
import { type SliceCreator } from "./store";
import loader from "@monaco-editor/loader";
import formatPromptConstructor from "~/promptConstructor/format";
export const editorBackground = "#fafafa";
export type CreatedEditor = ReturnType<Monaco["editor"]["create"]>;
type EditorOptions = {
getContent: () => string;
setContent: (content: string) => void;
};
export type SharedVariantEditorSlice = {
monaco: null | Monaco;
monaco: null | ReturnType<typeof loader.__getMonacoInstance>;
loadMonaco: () => Promise<void>;
scenarioVars: RouterOutputs["scenarioVars"]["list"];
updateScenariosModel: () => void;
setScenarioVars: (scenarioVars: RouterOutputs["scenarioVars"]["list"]) => void;
editorOptionsMap: Record<string, EditorOptions>;
updateOptionsForEditor: (uiId: string, { getContent, setContent }: EditorOptions) => void;
};
export const createVariantEditorSlice: SliceCreator<SharedVariantEditorSlice> = (set, get) => ({
@@ -103,10 +93,4 @@ export const createVariantEditorSlice: SliceCreator<SharedVariantEditorSlice> =
);
}
},
editorOptionsMap: {},
updateOptionsForEditor: (uiId, options) => {
set((state) => {
state.sharedVariantEditor.editorOptionsMap[uiId] = options;
});
},
});

View File

@@ -8,16 +8,13 @@ import {
createVariantEditorSlice,
} from "./sharedVariantEditor.slice";
import { type APIClient } from "~/utils/api";
import { type PersistedState, persistOptions } from "./persist";
import { persistOptions, type stateToPersist } from "./persist";
import { type SelectedLogsSlice, createSelectedLogsSlice } from "./selectedLogsSlice";
import { type LogFiltersSlice, createLogFiltersSlice } from "./logFiltersSlice";
import { type ColumnVisibilitySlice, createColumnVisibilitySlice } from "./columnVisiblitySlice";
import { type FeatureFlagsSlice, createFeatureFlagsSlice } from "./featureFlags";
enableMapSet();
export type State = {
isRehydrated: boolean;
drawerOpen: boolean;
openDrawer: () => void;
closeDrawer: () => void;
@@ -28,8 +25,6 @@ export type State = {
setSelectedProjectId: (id: string) => void;
selectedLogs: SelectedLogsSlice;
logFilters: LogFiltersSlice;
columnVisibility: ColumnVisibilitySlice;
featureFlags: FeatureFlagsSlice;
};
export type SliceCreator<T> = StateCreator<State, [["zustand/immer", never]], [], T>;
@@ -37,15 +32,18 @@ export type SliceCreator<T> = StateCreator<State, [["zustand/immer", never]], []
export type SetFn = Parameters<SliceCreator<unknown>>[0];
export type GetFn = Parameters<SliceCreator<unknown>>[1];
const useBaseStore = create<State, [["zustand/persist", PersistedState], ["zustand/immer", never]]>(
const useBaseStore = create<
State,
[["zustand/persist", typeof stateToPersist], ["zustand/immer", never]]
>(
persist(
immer((set, get, ...rest) => ({
isRehydrated: false,
api: null,
setApi: (api) =>
set((state) => {
state.api = api;
}),
drawerOpen: false,
openDrawer: () =>
set((state) => {
@@ -63,8 +61,6 @@ const useBaseStore = create<State, [["zustand/persist", PersistedState], ["zusta
}),
selectedLogs: createSelectedLogsSlice(set, get, ...rest),
logFilters: createLogFiltersSlice(set, get, ...rest),
columnVisibility: createColumnVisibilitySlice(set, get, ...rest),
featureFlags: createFeatureFlagsSlice(set, get, ...rest),
})),
persistOptions,
),

View File

@@ -78,6 +78,33 @@ export const requireCanModifyProject = async (projectId: string, ctx: TRPCContex
}
};
export const requireCanViewDataset = async (datasetId: string, ctx: TRPCContext) => {
ctx.markAccessControlRun();
const dataset = await prisma.dataset.findFirst({
where: {
id: datasetId,
project: {
projectUsers: {
some: {
role: { in: [ProjectUserRole.ADMIN, ProjectUserRole.MEMBER] },
userId: ctx.session?.user.id,
},
},
},
},
});
if (!dataset) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
};
export const requireCanModifyDataset = async (datasetId: string, ctx: TRPCContext) => {
// Right now all users who can view a dataset can also modify it
await requireCanViewDataset(datasetId, ctx);
};
export const requireCanViewExperiment = (experimentId: string, ctx: TRPCContext): Promise<void> => {
// Right now all experiments are publicly viewable, so this is a no-op.
ctx.markAccessControlRun();
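The new requireCanViewDataset helper above follows the same pattern as the other access-control checks: mark the check as run, resolve the dataset through the caller's project membership, and throw UNAUTHORIZED when nothing matches. A sketch of a dataset-scoped procedure using it; the router shape is illustrative, though the input mirrors the datasetEntries.list call seen in the hooks below.

// Illustrative tRPC procedure guarded by requireCanViewDataset.
import { z } from "zod";
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { prisma } from "~/server/db";
import { requireCanViewDataset } from "~/utils/accessControl";

export const datasetEntriesRouter = createTRPCRouter({
  list: protectedProcedure
    .input(z.object({ datasetId: z.string(), page: z.number(), pageSize: z.number() }))
    .query(async ({ input, ctx }) => {
      // Throws UNAUTHORIZED unless the caller is an ADMIN or MEMBER of the dataset's project.
      await requireCanViewDataset(input.datasetId, ctx);

      return prisma.datasetEntry.findMany({
        where: { datasetId: input.datasetId },
        skip: (input.page - 1) * input.pageSize,
        take: input.pageSize,
      });
    }),
});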

View File

@@ -1,12 +1,11 @@
"use client";
import { useSession } from "next-auth/react";
import React, { type ReactNode, useEffect } from "react";
import { PostHogProvider, useActiveFeatureFlags } from "posthog-js/react";
import { PostHogProvider } from "posthog-js/react";
import posthog from "posthog-js";
import { env } from "~/env.mjs";
import { useRouter } from "next/router";
import { useAppStore } from "~/state/store";
// Make sure we're in the browser
const inBrowser = typeof window !== "undefined";
@@ -25,14 +24,6 @@ export const PosthogAppProvider = ({ children }: { children: ReactNode }) => {
};
}, [router.events]);
const setFeatureFlags = useAppStore((s) => s.featureFlags.setFeatureFlags);
const activeFlags = useActiveFeatureFlags();
useEffect(() => {
if (activeFlags) {
setFeatureFlags(activeFlags);
}
}, [activeFlags, setFeatureFlags]);
useEffect(() => {
if (env.NEXT_PUBLIC_POSTHOG_KEY && inBrowser && session && session.user) {
posthog.init(env.NEXT_PUBLIC_POSTHOG_KEY, {

View File

@@ -26,6 +26,34 @@ export const useExperimentAccess = () => {
return useExperiment().data?.access ?? { canView: false, canModify: false };
};
export const useDatasets = () => {
const selectedProjectId = useAppStore((state) => state.selectedProjectId);
return api.datasets.list.useQuery(
{ projectId: selectedProjectId ?? "" },
{ enabled: !!selectedProjectId },
);
};
export const useDataset = () => {
const router = useRouter();
const dataset = api.datasets.get.useQuery(
{ id: router.query.id as string },
{ enabled: !!router.query.id },
);
return dataset;
};
export const useDatasetEntries = () => {
const dataset = useDataset();
const { page, pageSize } = usePageParams();
return api.datasetEntries.list.useQuery(
{ datasetId: dataset.data?.id ?? "", page, pageSize },
{ enabled: dataset.data?.id != null },
);
};
type AsyncFunction<T extends unknown[], U> = (...args: T) => Promise<U>;
export function useHandledAsyncCallback<T extends unknown[], U>(
@@ -148,13 +176,13 @@ export const useScenarioVars = () => {
);
};
export const useLoggedCalls = (applyFilters = true) => {
export const useLoggedCalls = () => {
const selectedProjectId = useAppStore((state) => state.selectedProjectId);
const { page, pageSize } = usePageParams();
const filters = useAppStore((state) => state.logFilters.filters);
const { data, isLoading, ...rest } = api.loggedCalls.list.useQuery(
{ projectId: selectedProjectId ?? "", page, pageSize, filters: applyFilters ? filters : [] },
{ projectId: selectedProjectId ?? "", page, pageSize, filters },
{ enabled: !!selectedProjectId },
);
@@ -177,22 +205,3 @@ export const useTagNames = () => {
{ enabled: !!selectedProjectId },
);
};
export const useFineTunes = () => {
const selectedProjectId = useAppStore((state) => state.selectedProjectId);
const { page, pageSize } = usePageParams();
return api.fineTunes.list.useQuery(
{ projectId: selectedProjectId ?? "", page, pageSize },
{ enabled: !!selectedProjectId },
);
};
export const useIsClientRehydrated = () => {
const isRehydrated = useAppStore((state) => state.isRehydrated);
const [isMounted, setIsMounted] = useState(false);
useEffect(() => {
setIsMounted(true);
}, []);
return isRehydrated && isMounted;
};
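The removed useIsClientRehydrated hook combines the store's isRehydrated flag with a mount check so persisted state (such as column visibility) is only trusted after client hydration, avoiding server/client markup mismatches. A sketch of the intended usage (component and import path are assumptions):

// Illustrative guard against hydration mismatch when rendering persisted UI state.
import { useIsClientRehydrated } from "~/utils/hooks"; // path assumed
import { useAppStore } from "~/state/store";

export function VisibleColumnCount() {
  const ready = useIsClientRehydrated();
  const count = useAppStore((s) => s.columnVisibility.visibleColumns.size);

  // Render a stable placeholder on the server and the first client paint so the
  // markup matches until the persisted store has rehydrated.
  if (!ready) return <span>…</span>;

  return <span>{count} columns visible</span>;
}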

app/test-docker.sh — new executable file (9 additions)
View File

@@ -0,0 +1,9 @@
#! /bin/bash
set -e
cd "$(dirname "$0")/.."
source app/.env
docker build . --file app/Dockerfile

View File

@@ -141,20 +141,10 @@
"type": "object",
"properties": {
"status": {
"anyOf": [
{
"type": "string",
"enum": [
"ok"
]
},
{
"type": "string",
"enum": [
"error"
]
}
]
}
},
"required": [

Some files were not shown because too many files have changed in this diff.