Compare commits


1 Commit

Author: David Corbitt
SHA1: 05a932ea74
Message: Give negative margin to account for border
Date: 2023-07-23 16:46:34 -07:00
455 changed files with 4806 additions and 28389 deletions


@@ -1,5 +0,0 @@
**/node_modules/
.git
**/.venv/
**/.env*
**/.next/


@@ -26,22 +26,6 @@ NEXT_PUBLIC_SOCKET_URL="http://localhost:3318"
NEXTAUTH_SECRET="your_secret" NEXTAUTH_SECRET="your_secret"
NEXTAUTH_URL="http://localhost:3000" NEXTAUTH_URL="http://localhost:3000"
NEXT_PUBLIC_HOST="http://localhost:3000"
# Next Auth Github Provider # Next Auth Github Provider
GITHUB_CLIENT_ID="your_client_id" GITHUB_CLIENT_ID="your_client_id"
GITHUB_CLIENT_SECRET="your_secret" GITHUB_CLIENT_SECRET="your_secret"
OPENPIPE_BASE_URL="http://localhost:3000/api/v1"
OPENPIPE_API_KEY="your_key"
SENDER_EMAIL="placeholder"
SMTP_HOST="placeholder"
SMTP_PORT="placeholder"
SMTP_LOGIN="placeholder"
SMTP_PASSWORD="placeholder"
# Azure credentials are necessary for uploading large training data files
AZURE_STORAGE_ACCOUNT_NAME="placeholder"
AZURE_STORAGE_ACCOUNT_KEY="placeholder"
AZURE_STORAGE_CONTAINER_NAME="placeholder"


@@ -6,7 +6,7 @@ const config = {
overrides: [ overrides: [
{ {
extends: ["plugin:@typescript-eslint/recommended-requiring-type-checking"], extends: ["plugin:@typescript-eslint/recommended-requiring-type-checking"],
files: ["*.mts", "*.ts", "*.tsx"], files: ["*.ts", "*.tsx"],
parserOptions: { parserOptions: {
project: path.join(__dirname, "tsconfig.json"), project: path.join(__dirname, "tsconfig.json"),
}, },
@@ -37,7 +37,6 @@ const config = {
"warn", "warn",
{ vars: "all", varsIgnorePattern: "^_", args: "after-used", argsIgnorePattern: "^_" }, { vars: "all", varsIgnorePattern: "^_", args: "after-used", argsIgnorePattern: "^_" },
], ],
"react/no-unescaped-entities": "off",
}, },
}; };


@@ -1,14 +0,0 @@
name: Sweep Fast Issue
title: 'Sweep (fast): '
description: For few-line fixes to be handled by Sweep, an AI-powered junior developer. Sweep will use GPT-3.5 to quickly create a PR for very small changes.
labels: sweep
body:
- type: textarea
id: description
attributes:
label: Details
description: Tell Sweep where and what to edit and provide enough context for a new developer to the codebase
placeholder: |
Bugs: The bug might be in ... file. Here are the logs: ...
Features: the new endpoint should use the ... class from ... file because it contains ... logic.
Refactors: We are migrating this function to ... version because ...


@@ -1,14 +0,0 @@
name: Sweep Slow Issue
title: 'Sweep (slow): '
description: For larger bugs, features, refactors, and tests to be handled by Sweep, an AI-powered junior developer. Sweep will perform a deeper search and more self-reviews but will take longer.
labels: sweep
body:
- type: textarea
id: description
attributes:
label: Details
description: Tell Sweep where and what to edit and provide enough context for a new developer to the codebase
placeholder: |
Bugs: The bug might be in ... file. Here are the logs: ...
Features: the new endpoint should use the ... class from ... file because it contains ... logic.
Refactors: We are migrating this function to ... version because ...


@@ -1,14 +0,0 @@
name: Sweep Issue
title: 'Sweep: '
description: For small bugs, features, refactors, and tests to be handled by Sweep, an AI-powered junior developer.
labels: sweep
body:
- type: textarea
id: description
attributes:
label: Details
description: Tell Sweep where and what to edit and provide enough context for a new developer to the codebase
placeholder: |
Bugs: The bug might be in ... file. Here are the logs: ...
Features: the new endpoint should use the ... class from ... file because it contains ... logic.
Refactors: We are migrating this function to ... version because ...


@@ -6,10 +6,6 @@ on:
push: push:
branches: [main] branches: [main]
defaults:
run:
working-directory: app
jobs: jobs:
run-checks: run-checks:
runs-on: ubuntu-latest runs-on: ubuntu-latest

.gitignore

@@ -1,6 +1,42 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# database
/prisma/db.sqlite
/prisma/db.sqlite-journal
# next.js
/.next/
/out/
next-env.d.ts
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# local env files
# do not commit any .env files to git, except for the .env.example file. https://create.t3.gg/en/usage/env-variables#using-environment-variables
.env .env
.venv/ .env*.local
*.pyc
node_modules/ # vercel
.vercel
# typescript
*.tsbuildinfo *.tsbuildinfo
dist/


@@ -1,2 +1,2 @@
*.schema.json src/codegen/openai.schema.json
app/pnpm-lock.yaml pnpm-lock.yaml


@@ -12,25 +12,11 @@ declare module "nextjs-routes" {
export type Route = export type Route =
| StaticRoute<"/account/signin"> | StaticRoute<"/account/signin">
| StaticRoute<"/admin/jobs">
| DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }> | DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
| StaticRoute<"/api/experiments/og-image">
| DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }> | DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>
| DynamicRoute<"/api/v1/[...trpc]", { "trpc": string[] }> | DynamicRoute<"/experiments/[id]", { "id": string }>
| StaticRoute<"/api/v1/openapi">
| StaticRoute<"/dashboard">
| DynamicRoute<"/datasets/[id]", { "id": string }>
| StaticRoute<"/datasets">
| DynamicRoute<"/experiments/[experimentSlug]", { "experimentSlug": string }>
| StaticRoute<"/experiments"> | StaticRoute<"/experiments">
| StaticRoute<"/fine-tunes"> | StaticRoute<"/">;
| StaticRoute<"/">
| DynamicRoute<"/invitations/[invitationToken]", { "invitationToken": string }>
| StaticRoute<"/project/settings">
| StaticRoute<"/request-logs">
| StaticRoute<"/sentry-example-page">
| StaticRoute<"/world-champs">
| StaticRoute<"/world-champs/signup">;
interface StaticRoute<Pathname> { interface StaticRoute<Pathname> {
pathname: Pathname; pathname: Pathname;

Dockerfile

@@ -0,0 +1,42 @@
# Adapted from https://create.t3.gg/en/deployment/docker#3-create-dockerfile
FROM node:20.1.0-bullseye as base
RUN yarn global add pnpm
# DEPS
FROM base as deps
WORKDIR /app
COPY prisma ./
COPY package.json pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile
# BUILDER
FROM base as builder
# Include all NEXT_PUBLIC_* env vars here
ARG NEXT_PUBLIC_POSTHOG_KEY
ARG NEXT_PUBLIC_SOCKET_URL
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
COPY . .
RUN SKIP_ENV_VALIDATION=1 pnpm build
# RUNNER
FROM base as runner
WORKDIR /app
ENV NODE_ENV production
ENV NEXT_TELEMETRY_DISABLED 1
COPY --from=builder /app/ ./
EXPOSE 3000
ENV PORT 3000
# Run the "run-prod.sh" script
CMD /app/run-prod.sh
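For reference, a hedged sketch of how this image could be built and run; the build-arg names come from the Dockerfile above, while the values and the env file are placeholders rather than part of the diff:

# pass the NEXT_PUBLIC_* build args declared in the builder stage
docker build \
  --build-arg NEXT_PUBLIC_POSTHOG_KEY=<posthog_key> \
  --build-arg NEXT_PUBLIC_SOCKET_URL=<socket_url> \
  -t openpipe .
# supply runtime configuration (DATABASE_URL, auth secrets, etc.) via an env file
docker run --env-file .env -p 3000:3000 openpipe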

README.md

@@ -1,80 +1,50 @@
<p align="center"> <img src="https://github.com/openpipe/openpipe/assets/41524992/ca59596e-eb80-40f9-921f-6d67f6e6d8fa" width="72px" />
<a href="https://openpipe.ai">
<img height="70" src="https://github.com/openpipe/openpipe/assets/41524992/70af25fb-1f90-42d9-8a20-3606e3b5aaba" alt="logo">
</a>
</p>
<h1 align="center">
OpenPipe
</h1>
<p align="center"> # OpenPipe
<i>Turn expensive prompts into cheap fine-tuned models.</i>
</p>
<p align="center">
<a href="/LICENSE"><img alt="License Apache-2.0" src="https://img.shields.io/github/license/openpipe/openpipe?style=flat-square"></a>
<a href='http://makeapullrequest.com'><img alt='PRs Welcome' src='https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square'/></a>
<a href="https://github.com/openpipe/openpipe/graphs/commit-activity"><img alt="GitHub commit activity" src="https://img.shields.io/github/commit-activity/m/openpipe/openpipe?style=flat-square"/></a>
<a href="https://github.com/openpipe/openpipe/issues"><img alt="GitHub closed issues" src="https://img.shields.io/github/issues-closed/openpipe/openpipe?style=flat-square"/></a>
<img src="https://img.shields.io/badge/Y%20Combinator-S23-orange?style=flat-square" alt="Y Combinator S23">
</p>
<p align="center">
<a href="https://app.openpipe.ai/">Hosted App</a> - <a href="#running-locally">Running Locally</a> - <a href="#sample-experiments">Experiments</a>
</p>
<br>
Use powerful but expensive LLMs to fine-tune smaller and cheaper models suited to your exact needs. Evaluate model and prompt combinations in the playground. Query your past requests and export optimized training data. Try it out at https://app.openpipe.ai or <a href="#running-locally">run it locally</a>.
<br>
## Features
* <b>Experiment</b>
* Bulk-test wide-reaching scenarios using code templating.
* Seamlessly translate prompts across different model APIs.
* Tap into autogenerated scenarios for fresh test perspectives.
* <b>Fine-Tune (Beta)</b>
* Easy integration with OpenPipe's SDK in both Python and JS.
* Swiftly query logs using intuitive built-in filters.
* Export data in multiple training formats, including Alpaca and ChatGPT, with deduplication.
<img src="https://github.com/openpipe/openpipe/assets/41524992/eaa8b92d-4536-4f63-bbef-4b0b1a60f6b5" alt="fine-tune demo">
<!-- <img height="400px" src="https://github.com/openpipe/openpipe/assets/41524992/66bb1843-cb72-4130-a369-eec2df3b8201" alt="playground demo"> -->
OpenPipe is a flexible playground for comparing and optimizing LLM prompts. It lets you quickly generate, test and compare candidate prompts with realistic sample data.
## Sample Experiments ## Sample Experiments
These are sample experiments users have created that show how OpenPipe works. Feel free to fork them and start experimenting yourself. These are simple experiments users have created that show how OpenPipe works.
- [Twitter Sentiment Analysis](https://app.openpipe.ai/experiments/62c20a73-2012-4a64-973c-4b665ad46a57) - [Country Capitals](https://app.openpipe.ai/experiments/11111111-1111-1111-1111-111111111111)
- [Reddit User Needs](https://app.openpipe.ai/experiments/22222222-2222-2222-2222-222222222222) - [Reddit User Needs](https://app.openpipe.ai/experiments/22222222-2222-2222-2222-222222222222)
- [OpenAI Function Calls](https://app.openpipe.ai/experiments/2ebbdcb3-ed51-456e-87dc-91f72eaf3e2b) - [OpenAI Function Calls](https://app.openpipe.ai/experiments/2ebbdcb3-ed51-456e-87dc-91f72eaf3e2b)
- [Activity Classification](https://app.openpipe.ai/experiments/3950940f-ab6b-4b74-841d-7e9dbc4e4ff8) - [Activity Classification](https://app.openpipe.ai/experiments/3950940f-ab6b-4b74-841d-7e9dbc4e4ff8)
<img src="https://github.com/openpipe/openpipe/assets/176426/fc7624c6-5b65-4d4d-82b7-4a816f3e5678" alt="demo" height="400px">
You can use our hosted version of OpenPipe at [https://openpipe.ai]. You can also clone this repository and [run it locally](#running-locally).
## High-Level Features
**Configure Multiple Prompts**
Set up multiple prompt configurations and compare their output side-by-side. Each configuration can be configured independently.
**Visualize Responses**
Inspect prompt completions side-by-side.
**Test Many Inputs**
OpenPipe lets you _template_ a prompt. Use the templating feature to run the prompts you're testing against many potential inputs for broader coverage of your problem space than you'd get with manual testing.
**🪄 Auto-generate Test Scenarios**
OpenPipe includes a tool to generate new test scenarios based on your existing prompts and scenarios. Just click "Autogenerate Scenario" to try it out!
**Prompt Validation and Typeahead**
We use OpenAI's OpenAPI spec to automatically provide typeahead and validate prompts.
<img alt="typeahead" src="https://github.com/openpipe/openpipe/assets/176426/acc638f8-d851-4742-8d01-fe6f98890840" height="300px">
**Function Call Support**
Natively supports [OpenAI function calls](https://openai.com/blog/function-calling-and-other-api-updates) on supported models.
<img height="300px" alt="function calls" src="https://github.com/openpipe/openpipe/assets/176426/48ad13fe-af2f-4294-bf32-62015597fd9b">
## Supported Models ## Supported Models
#### OpenAI - All models available through the OpenAI [chat completion API](https://platform.openai.com/docs/guides/gpt/chat-completions-api)
- [GPT 3.5 Turbo](https://platform.openai.com/docs/guides/gpt/chat-completions-api) - Llama2 [7b chat](https://replicate.com/a16z-infra/llama7b-v2-chat), [13b chat](https://replicate.com/a16z-infra/llama13b-v2-chat), [70b chat](https://replicate.com/replicate/llama70b-v2-chat).
- [GPT 3.5 Turbo 16k](https://platform.openai.com/docs/guides/gpt/chat-completions-api)
- [GPT 4](https://openai.com/gpt-4)
#### Llama2
- [7b chat](https://replicate.com/a16z-infra/llama7b-v2-chat)
- [13b chat](https://replicate.com/a16z-infra/llama13b-v2-chat)
- [70b chat](https://replicate.com/replicate/llama70b-v2-chat)
#### Llama2 Fine-Tunes
- [Open-Orca/OpenOrcaxOpenChat-Preview2-13B](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B)
- [Open-Orca/OpenOrca-Platypus2-13B](https://huggingface.co/Open-Orca/OpenOrca-Platypus2-13B)
- [NousResearch/Nous-Hermes-Llama2-13b](https://huggingface.co/NousResearch/Nous-Hermes-Llama2-13b)
- [jondurbin/airoboros-l2-13b-gpt4-2.0](https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-2.0)
- [lmsys/vicuna-13b-v1.5](https://huggingface.co/lmsys/vicuna-13b-v1.5)
- [Gryphe/MythoMax-L2-13b](https://huggingface.co/Gryphe/MythoMax-L2-13b)
- [NousResearch/Nous-Hermes-llama-2-7b](https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b)
#### Anthropic
- [Claude 1 Instant](https://www.anthropic.com/index/introducing-claude)
- [Claude 2](https://www.anthropic.com/index/claude-2)
## Running Locally ## Running Locally
@@ -84,14 +54,7 @@ These are sample experiments users have created that show how OpenPipe works. Fe
4. Clone this repository: `git clone https://github.com/openpipe/openpipe` 4. Clone this repository: `git clone https://github.com/openpipe/openpipe`
5. Install the dependencies: `cd openpipe && pnpm install` 5. Install the dependencies: `cd openpipe && pnpm install`
6. Create a `.env` file (`cp .env.example .env`) and enter your `OPENAI_API_KEY`. 6. Create a `.env` file (`cp .env.example .env`) and enter your `OPENAI_API_KEY`.
7. Update `DATABASE_URL` if necessary to point to your Postgres instance and run `pnpm prisma migrate dev` to create the database. 7. Update `DATABASE_URL` if necessary to point to your Postgres instance and run `pnpm prisma db push` to create the database.
8. Create a [GitHub OAuth App](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/creating-an-oauth-app) and update the `GITHUB_CLIENT_ID` and `GITHUB_CLIENT_SECRET` values. (Note: a PR to make auth optional when running locally would be a great contribution!) 8. Create a [GitHub OAuth App](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/creating-an-oauth-app) and update the `GITHUB_CLIENT_ID` and `GITHUB_CLIENT_SECRET` values. (Note: a PR to make auth optional when running locally would be a great contribution!)
9. Start the app: `pnpm dev`. 9. Start the app: `pnpm dev`.
10. Navigate to [http://localhost:3000](http://localhost:3000) 10. Navigate to [http://localhost:3000](http://localhost:3000)
## Testing Locally
1. Copy your `.env` file to `.env.test`.
2. Update the `DATABASE_URL` to have a different database name than your development one
3. Run `DATABASE_URL=[your new database url] pnpm prisma migrate dev --skip-seed --skip-generate`
4. Run `pnpm test`
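The numbered setup steps in this hunk condense into roughly the following shell sketch (values are placeholders; the two sides of the diff differ on whether the schema is created with `prisma migrate dev` or `prisma db push`):

git clone https://github.com/openpipe/openpipe
cd openpipe && pnpm install
cp .env.example .env    # fill in OPENAI_API_KEY, GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET
pnpm prisma db push     # or: pnpm prisma migrate dev, depending on the version
pnpm dev                # then open http://localhost:3000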

app/.gitignore

@@ -1,49 +0,0 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# database
/prisma/db.sqlite
/prisma/db.sqlite-journal
# next.js
/.next/
/out/
next-env.d.ts
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# local env files
# do not commit any .env files to git, except for the .env.example file. https://create.t3.gg/en/usage/env-variables#using-environment-variables
.env
.env*.local
.env.test
# vercel
.vercel
# typescript
*.tsbuildinfo
# Sentry Auth Token
.sentryclirc
# custom openai intialization
src/server/utils/openaiCustomConfig.json


@@ -1,47 +0,0 @@
# Adapted from https://create.t3.gg/en/deployment/docker#3-create-dockerfile
FROM node:20.1.0-bullseye as base
RUN yarn global add pnpm
# DEPS
FROM base as deps
WORKDIR /code
COPY app/prisma app/package.json ./app/
COPY client-libs/typescript/package.json ./client-libs/typescript/
COPY pnpm-lock.yaml pnpm-workspace.yaml ./
RUN cd app && pnpm install --frozen-lockfile
# BUILDER
FROM base as builder
# Include all NEXT_PUBLIC_* env vars here
ARG NEXT_PUBLIC_POSTHOG_KEY
ARG NEXT_PUBLIC_SOCKET_URL
ARG NEXT_PUBLIC_HOST
ARG NEXT_PUBLIC_SENTRY_DSN
ARG SENTRY_AUTH_TOKEN
WORKDIR /code
COPY --from=deps /code/node_modules ./node_modules
COPY --from=deps /code/app/node_modules ./app/node_modules
COPY --from=deps /code/client-libs/typescript/node_modules ./client-libs/typescript/node_modules
COPY . .
RUN cd app && SKIP_ENV_VALIDATION=1 pnpm build
# RUNNER
FROM base as runner
WORKDIR /code/app
ENV NODE_ENV production
ENV NEXT_TELEMETRY_DISABLED 1
COPY --from=builder /code/ /code/
EXPOSE 3000
ENV PORT 3000
# Run the "run-prod.sh" script
CMD /code/app/scripts/run-prod.sh


@@ -1,63 +0,0 @@
import nextRoutes from "nextjs-routes/config";
import { withSentryConfig } from "@sentry/nextjs";
/**
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation. This is especially useful
* for Docker builds.
*/
const { env } = await import("./src/env.mjs");
/** @type {import("next").NextConfig} */
let config = {
reactStrictMode: true,
/**
* If you have `experimental: { appDir: true }` set, then you must comment the below `i18n` config
* out.
*
* @see https://github.com/vercel/next.js/issues/41980
*/
i18n: {
locales: ["en"],
defaultLocale: "en",
},
rewrites: async () => [
{
source: "/ingest/:path*",
destination: "https://app.posthog.com/:path*",
},
],
webpack: (config) => {
config.module.rules.push({
test: /\.txt$/,
use: "raw-loader",
});
return config;
},
transpilePackages: ["openpipe"],
};
config = nextRoutes()(config);
if (env.NEXT_PUBLIC_SENTRY_DSN && env.SENTRY_AUTH_TOKEN) {
// @ts-expect-error - `withSentryConfig` is not typed correctly
config = withSentryConfig(
config,
{
authToken: env.SENTRY_AUTH_TOKEN,
silent: true,
org: "openpipe",
project: "openpipe",
},
{
widenClientFileUpload: true,
tunnelRoute: "/monitoring",
disableLogger: true,
},
);
}
export default config;


@@ -1,7 +0,0 @@
{
"$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json",
"spaces": 2,
"generator-cli": {
"version": "6.6.0"
}
}


@@ -1,84 +0,0 @@
Text,sentiment,emotion
@dell your customer service is horrible especially agent syedfaisal who has made this experience of purchasing a new computer downright awful and Ill reconsider ever buying a Dell in the future @DellTech,negative,anger
@zacokalo @Dell @DellCares @Dell give the man what he paid for!,neutral,anger
"COOKING STREAM DAY!!! Ty to @Alienware for sponsoring this stream! Ill be making a bunch of Japanese Alien themed foods hehe
Come check it out! https://t.co/m06tJQ06zk
#alienwarepartner #intelgaming @Dell @IntelGaming https://t.co/qOdQX2E8VD",positive,joy
@emijuju_ @Alienware @Dell @intel Beautiful 😍❤️😻,positive,joy
"What's your biggest data management challenge? • Cloud complexity? • Lengthy tech refresh cycles? • Capital budget constraints? Solve your challenges with as-a-Storage. Get simplicity, agility &amp; control with @Dell #APEX. https://t.co/mCblMtH931 https://t.co/eepKNZ4Ai3",neutral,optimism
"This week we were at the ""Top Gun"" themed @Dell Product Expo. Eddie Muñoz met Maverick look-alike, California Tom Cruise (Jerome LeBlanc)!
""I feel the need, the need for speed."" - Maverick
#topgun #topgunmaverick #dell #delltechnologies #lockncharge https://t.co/QHYH2EbMjq",positive,joy
"Itsss been more than a week...i m following up with dell for troubleshootings...my https://t.co/lWhg2YKhQa suffering so as my hard earned money...hightly disappointed...contd..
@DellCares @Dell",negative,sadness
"@ashu_k7 @Dell Pathetic!!!!! I Dont mind taking legal action, this is deficency of service for which the customer is nt getting help..",negative,anger
@ashu_k7 @Dell Making life unhappy is the new tag line of #Dell,negative,sadness
"@Dell If you are buying a Dell, make sure you are making your life hell.
Better buy other laptops. If you wanted to opt for Dell better opt for garbage on the streets.",negative,anger
"MY DESK'S FINAL FORM? Seriously, I'm finally happy with my monitor setup here... and I'll keep this setup whenever I move... FOREVER. What do you think?
https://t.co/WJZ2JXtOnX
@Alienware @Dell cheers. https://t.co/6Whhldfpv0",positive,joy
"@Dell Dell Alienware computer has had software problems with SupportAssist since purchase. Dell, despite paying for Premium Support, has never fixed issues. Latest solution was to erase everything and reload....SupportAssist still doesn't work.",negative,anger
"HUGE congratulations to Startup Battle 3.0 winner ➡️ @Ox_Fulfillment x @cyborgcharu for being featured in @BusinessInsider &amp; @Dell showcasing the journey at Ox! 🚀🚀🚀
We love to see our portfolio companies continuing to BUILD SOMETHING FROM NOTHING! 🔥 https://t.co/awBkn5ippB",positive,joy
@Dell happy Friday!,positive,joy
"@intel Core i5 1135G7 - 4732 points
@intel Core i5 1235 - 6619 points
@Dell Latitude 5420 x 5430.
Cinebench R23. Good job Intel!",positive,joy
@Dell india we purchased 52 docking station and we have around 100 users using dell laptop as well as dell monitor now they are refusing to replace my faulty product and disconnecting my every call....,negative,anger
"It's another year ans another day But cant fill it in yet the child hood dreams.
It's my birthdy today. Can anyone of you guys bless me with a simplest gaming oc that can run
@DOTA2 ?
@Dell @HP @VastGG @Acer @Alienware @Lenovo @toshiba @IBM @Fujitsu_Global @NEC https://t.co/69G8tL9sN8",neutral,joy
"@idoccor @Dell That's always the decision—wait, or, look elsewhere. In this case, I think I unfortunately need to wait since there are only two monitors with these specs and I don't like the other one 😂",negative,sadness
"@MichaelDell @Dell @DellCares For how long this will continue. It is high time you either fix the problem for good or replace the complete laptop. Spent over 60+ hours with Customer Care teams, which is not helping. Cannot keep going on like this.",negative,anger
"@Dell @DellCares but no, not really",neutral,sadness
"Business innovation requires insight, agility and efficiency. How do you get there? RP PRO, LLC recommends starting by proactively managing IT infrastructure with #OpenManage Systems from @Dell. https://t.co/fBcK1lfFMu https://t.co/xWHLkkHCjn",neutral,optimism
@Dell Yessirrrrr #NationalCoffeeDay,positive,joy
"New blog post from @Dell shared on https://t.co/EgfPChB8AT
Re-routing Our Connected and Autonomous Future https://t.co/AW8EHQrbd6
#future #futuretech #techinnovation https://t.co/koX8stKPsr",neutral,joy
"In a free-market economy, the folks @IronMountain can set prices as they see fit. Their customers are also free to find better prices at competitors like @Dell
@H3CGlobal @HPE
https://t.co/reZ56DNTBI",neutral,optimism
"Delighted to chat with many of our partners here in person at @Intel Innovation! @Dell, @Lenovo, @Supermicro_SMCI, @QuantaQCT #IntelON https://t.co/BxIeGW8deN",positive,joy
"A special gracias to our Startup Chica San Antonio 2022 sponsors @eBay, @jcpenney, @Barbie, @HEB, @Dell, @Honda, @SouthsideSATX💜✨ https://t.co/lZ6WWkziHl",positive,joy
"When your team decides to start supporting developers, your #ops must change too. More from @cote and @Dell Developer Community Manager @barton808: https://t.co/W6f1oMiTgV",neutral,optimism
@EmDStowers @LASERGIANT1 @ohwormongod @Ludovician_Vega @Dell our boy snitchin,neutral,anger
A 1st place dmi:Design Value Award goes to @Dell for a packaging modernization initiative that helped them get closer to their corporate Moonshot Sustainability Goal of 100% recycled or renewable packaging by 2030. More at https://t.co/dnhZWWLCQC #designvalue #DVA22,positive,optimism
Reducing deployment and maintenance complexity is the goal behind @dell and @WindRiver's new collaboration. https://t.co/2PxQgPuHUU,positive,optimism
@jaserhunter @Dell Love the sales pitch lol,positive,joy
@Dell india we purchased 52 docking station and we have around 100 users using dell laptop as well as dell monitor now they are refusing to replace my faulty product and disconnecting my every call....,negative,anger
@ashu_k7 @Dell One more example.. their technical support is also worse. https://t.co/20atSgI4fg,negative,anger
*angry screeches about @Dell proprietary MBR windows 8.1 partitions not being able to save as an img in clonezilla *,negative,anger
@socialitebooks @BBYC_Gamers @Dell @Alienware @BestBuyCanada @intelcanada Congratulations!!!,positive,joy
"Thank you to the @dell team for coming out to volunteer today! We truly appreciate your hard work and look forward to seeing you again soon!
If you and your team are interested in helping out at the UMLAUF, visit our website for more information: https://t.co/lVfsZT2ogS https://t.co/eLz0FY0y4M",positive,joy
"@TheCaramelGamer @intel @bravadogaming @Intel_Africa @Dell @DellTech @DellTechMEA @Alienware @IntelUK we love to see it.
Also also actually actually whoever did that artwork? 🔥🔥🔥 am a fan.",positive,joy
"LOVING MY DELL 2 IN 1 LAPTOP
YAYY 🥳🥳
@Dell #DellInspiron #DellLaptop https://t.co/vib96jf3tC",positive,joy
@Azure @OracleItalia @AWS_Italy @lenovoitalia @Dell discussing the future of #HPC during the #hpcroundtable22 in Turin today #highperformancecomputing https://t.co/jJ1WqBulPF,neutral,joy
Attracting talent @AmericanChamber. @marg_cola @Dell speaks of quality of life connectivity and the Opportunity for development being so crucial. Housing availability is now impacting on decision making for potential candidates. #WhyCork,positive,optimism
.@Dell partners with @WindRiver on modular cloud-native telecommunications infrastructure https://t.co/4SWATspwCP @SiliconANGLE @Mike_Wheatley @holgermu @constellationr,neutral,joy
@Dell Not buy Dell Inspiron laptop,neutral,sadness
"@dell #delltechforum reminding us IDC have predicted that by 2024, 50% of everything we consume in technology will be as a service https://t.co/3UBiZJX0LE",neutral,optimism
@RachMurph @HETTShow @Dell Thank you for coming! Great evening,positive,joy
Congratulations to Jason M of Moncton NB on winning a @Dell @Alienware m15 R7 15.6″ gaming laptop from @BestBuyCanada and @intelcanada's gaming days #contest on the blog. Visit https://t.co/VryaY5Rvv9 to learn about tech and for chances to win new tech. https://t.co/T6n0dzF6oL,positive,joy
@MattVisiwig @Dell Sour taste for sure 😶 But don't let ego distract you from what you really want to buy 😁,neutral,optimism
"Massive thank you goes to sponsors @HendersonLoggie @lindsaysnews @Dell @unity, all of our fantastic judges and mentors and the team at @EGX and @ExCeLLondon.
Big congratulations also to all of our other @AbertayDare teams - an amazing year! #Dare2022 https://t.co/jYe4agO7lW",positive,joy
"@timetcetera @rahaug Nah, I just need @Dell to start paying me comissions 😂",neutral,joy
"""Whether youre an engineer, a designer, or work in supply chain management or sales, there are always opportunities to think about sustainability and how you can do things more efficiently."" 👏 — Oliver Campbell, Director of Packaging Engineering, @Dell https://t.co/vUJLTWNFwP https://t.co/GJWAzGfAxJ",positive,optimism
"Hi, my name is @listerepvp and I support @Dell, always.",positive,joy


@@ -1,12 +0,0 @@
import { prisma } from "~/server/db";
// delete most recent fineTune
const mostRecentFineTune = await prisma.fineTune.findFirst({
orderBy: { createdAt: "desc" },
});
if (mostRecentFineTune) {
await prisma.fineTune.delete({
where: { id: mostRecentFineTune.id },
});
}


@@ -1,8 +0,0 @@
/*
Warnings:
- You are about to drop the column `streamingChannel` on the `ScenarioVariantCell` table. All the data in the column will be lost.
*/
-- AlterTable
ALTER TABLE "ScenarioVariantCell" DROP COLUMN "streamingChannel";


@@ -1,52 +0,0 @@
-- DropForeignKey
ALTER TABLE "ModelOutput" DROP CONSTRAINT "ModelOutput_scenarioVariantCellId_fkey";
-- DropForeignKey
ALTER TABLE "OutputEvaluation" DROP CONSTRAINT "OutputEvaluation_modelOutputId_fkey";
-- DropIndex
DROP INDEX "OutputEvaluation_modelOutputId_evaluationId_key";
-- AlterTable
ALTER TABLE "OutputEvaluation" RENAME COLUMN "modelOutputId" TO "modelResponseId";
-- AlterTable
ALTER TABLE "ScenarioVariantCell" DROP COLUMN "retryTime",
DROP COLUMN "statusCode",
ADD COLUMN "jobQueuedAt" TIMESTAMP(3),
ADD COLUMN "jobStartedAt" TIMESTAMP(3);
ALTER TABLE "ModelOutput" RENAME TO "ModelResponse";
ALTER TABLE "ModelResponse"
ADD COLUMN "requestedAt" TIMESTAMP(3),
ADD COLUMN "receivedAt" TIMESTAMP(3),
ADD COLUMN "statusCode" INTEGER,
ADD COLUMN "errorMessage" TEXT,
ADD COLUMN "retryTime" TIMESTAMP(3),
ADD COLUMN "outdated" BOOLEAN NOT NULL DEFAULT false;
-- 3. Remove the unnecessary column
ALTER TABLE "ModelResponse"
DROP COLUMN "timeToComplete";
-- AlterTable
ALTER TABLE "ModelResponse" RENAME CONSTRAINT "ModelOutput_pkey" TO "ModelResponse_pkey";
ALTER TABLE "ModelResponse" ALTER COLUMN "output" DROP NOT NULL;
-- DropIndex
DROP INDEX "ModelOutput_scenarioVariantCellId_key";
-- AddForeignKey
ALTER TABLE "ModelResponse" ADD CONSTRAINT "ModelResponse_scenarioVariantCellId_fkey" FOREIGN KEY ("scenarioVariantCellId") REFERENCES "ScenarioVariantCell"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- RenameIndex
ALTER INDEX "ModelOutput_inputHash_idx" RENAME TO "ModelResponse_inputHash_idx";
-- CreateIndex
CREATE UNIQUE INDEX "OutputEvaluation_modelResponseId_evaluationId_key" ON "OutputEvaluation"("modelResponseId", "evaluationId");
-- AddForeignKey
ALTER TABLE "OutputEvaluation" ADD CONSTRAINT "OutputEvaluation_modelResponseId_fkey" FOREIGN KEY ("modelResponseId") REFERENCES "ModelResponse"("id") ON DELETE CASCADE ON UPDATE CASCADE;


@@ -1,16 +0,0 @@
-- CreateTable
CREATE TABLE "WorldChampEntrant" (
"id" UUID NOT NULL,
"userId" UUID NOT NULL,
"approved" BOOLEAN NOT NULL DEFAULT false,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "WorldChampEntrant_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "WorldChampEntrant_userId_key" ON "WorldChampEntrant"("userId");
-- AddForeignKey
ALTER TABLE "WorldChampEntrant" ADD CONSTRAINT "WorldChampEntrant_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;


@@ -1,3 +0,0 @@
-- AlterTable
ALTER TABLE "User" ADD COLUMN "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
ADD COLUMN "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP;


@@ -1,5 +0,0 @@
-- CreateEnum
CREATE TYPE "UserRole" AS ENUM ('ADMIN', 'USER');
-- AlterTable
ALTER TABLE "User" ADD COLUMN "role" "UserRole" NOT NULL DEFAULT 'USER';


@@ -1,28 +0,0 @@
-- CreateTable
CREATE TABLE "Dataset" (
"id" UUID NOT NULL,
"name" TEXT NOT NULL,
"organizationId" UUID NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "Dataset_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "DatasetEntry" (
"id" UUID NOT NULL,
"input" TEXT NOT NULL,
"output" TEXT,
"datasetId" UUID NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "DatasetEntry_pkey" PRIMARY KEY ("id")
);
-- AddForeignKey
ALTER TABLE "Dataset" ADD CONSTRAINT "Dataset_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "DatasetEntry" ADD CONSTRAINT "DatasetEntry_datasetId_fkey" FOREIGN KEY ("datasetId") REFERENCES "Dataset"("id") ON DELETE CASCADE ON UPDATE CASCADE;


@@ -1,13 +0,0 @@
/*
Warnings:
- You are about to drop the column `constructFn` on the `PromptVariant` table. All the data in the column will be lost.
- You are about to drop the column `constructFnVersion` on the `PromptVariant` table. All the data in the column will be lost.
- Added the required column `promptConstructor` to the `PromptVariant` table without a default value. This is not possible if the table is not empty.
- Added the required column `promptConstructorVersion` to the `PromptVariant` table without a default value. This is not possible if the table is not empty.
*/
-- AlterTable
ALTER TABLE "PromptVariant" RENAME COLUMN "constructFn" TO "promptConstructor";
ALTER TABLE "PromptVariant" RENAME COLUMN "constructFnVersion" TO "promptConstructorVersion";


@@ -1,90 +0,0 @@
-- CreateTable
CREATE TABLE "LoggedCall" (
"id" UUID NOT NULL,
"startTime" TIMESTAMP(3) NOT NULL,
"cacheHit" BOOLEAN NOT NULL,
"modelResponseId" UUID NOT NULL,
"organizationId" UUID NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "LoggedCall_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "LoggedCallModelResponse" (
"id" UUID NOT NULL,
"reqPayload" JSONB NOT NULL,
"respStatus" INTEGER,
"respPayload" JSONB,
"error" TEXT,
"startTime" TIMESTAMP(3) NOT NULL,
"endTime" TIMESTAMP(3) NOT NULL,
"cacheKey" TEXT,
"durationMs" INTEGER,
"inputTokens" INTEGER,
"outputTokens" INTEGER,
"finishReason" TEXT,
"completionId" TEXT,
"totalCost" DECIMAL(18,12),
"originalLoggedCallId" UUID NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "LoggedCallModelResponse_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "LoggedCallTag" (
"id" UUID NOT NULL,
"name" TEXT NOT NULL,
"value" TEXT,
"loggedCallId" UUID NOT NULL,
CONSTRAINT "LoggedCallTag_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "ApiKey" (
"id" UUID NOT NULL,
"name" TEXT NOT NULL,
"apiKey" TEXT NOT NULL,
"organizationId" UUID NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "ApiKey_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE INDEX "LoggedCall_startTime_idx" ON "LoggedCall"("startTime");
-- CreateIndex
CREATE UNIQUE INDEX "LoggedCallModelResponse_originalLoggedCallId_key" ON "LoggedCallModelResponse"("originalLoggedCallId");
-- CreateIndex
CREATE INDEX "LoggedCallModelResponse_cacheKey_idx" ON "LoggedCallModelResponse"("cacheKey");
-- CreateIndex
CREATE INDEX "LoggedCallTag_name_idx" ON "LoggedCallTag"("name");
-- CreateIndex
CREATE INDEX "LoggedCallTag_name_value_idx" ON "LoggedCallTag"("name", "value");
-- CreateIndex
CREATE UNIQUE INDEX "ApiKey_apiKey_key" ON "ApiKey"("apiKey");
-- AddForeignKey
ALTER TABLE "LoggedCall" ADD CONSTRAINT "LoggedCall_modelResponseId_fkey" FOREIGN KEY ("modelResponseId") REFERENCES "LoggedCallModelResponse"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "LoggedCall" ADD CONSTRAINT "LoggedCall_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "LoggedCallModelResponse" ADD CONSTRAINT "LoggedCallModelResponse_originalLoggedCallId_fkey" FOREIGN KEY ("originalLoggedCallId") REFERENCES "LoggedCall"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "LoggedCallTag" ADD CONSTRAINT "LoggedCallTag_loggedCallId_fkey" FOREIGN KEY ("loggedCallId") REFERENCES "LoggedCall"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "ApiKey" ADD CONSTRAINT "ApiKey_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;


@@ -1,2 +0,0 @@
-- AlterTable
ALTER TABLE "Organization" ADD COLUMN "name" TEXT NOT NULL DEFAULT 'Project 1';


@@ -1,2 +0,0 @@
-- AlterTable
ALTER TABLE "LoggedCall" ALTER COLUMN "modelResponseId" DROP NOT NULL;


@@ -1,37 +0,0 @@
-- Rename Enum
ALTER TYPE "OrganizationUserRole" RENAME TO "ProjectUserRole";
-- Drop and recreate foreign keys
ALTER TABLE "ApiKey" DROP CONSTRAINT "ApiKey_organizationId_fkey";
ALTER TABLE "Dataset" DROP CONSTRAINT "Dataset_organizationId_fkey";
ALTER TABLE "Experiment" DROP CONSTRAINT "Experiment_organizationId_fkey";
ALTER TABLE "LoggedCall" DROP CONSTRAINT "LoggedCall_organizationId_fkey";
ALTER TABLE "OrganizationUser" DROP CONSTRAINT "OrganizationUser_organizationId_fkey";
ALTER TABLE "OrganizationUser" DROP CONSTRAINT "OrganizationUser_userId_fkey";
-- Rename columns
ALTER TABLE "ApiKey" RENAME COLUMN "organizationId" TO "projectId";
ALTER TABLE "Dataset" RENAME COLUMN "organizationId" TO "projectId";
ALTER TABLE "Experiment" RENAME COLUMN "organizationId" TO "projectId";
ALTER TABLE "LoggedCall" RENAME COLUMN "organizationId" TO "projectId";
ALTER TABLE "OrganizationUser" RENAME COLUMN "organizationId" TO "projectId";
ALTER TABLE "Organization" RENAME COLUMN "personalOrgUserId" TO "personalProjectUserId";
-- Rename table
ALTER TABLE "Organization" RENAME TO "Project";
ALTER TABLE "OrganizationUser" RENAME TO "ProjectUser";
-- Recreate foreign keys
ALTER TABLE "Experiment" ADD CONSTRAINT "Experiment_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
ALTER TABLE "Dataset" ADD CONSTRAINT "Dataset_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
ALTER TABLE "ProjectUser" ADD CONSTRAINT "ProjectUser_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
ALTER TABLE "ProjectUser" ADD CONSTRAINT "ProjectUser_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
ALTER TABLE "LoggedCall" ADD CONSTRAINT "LoggedCall_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
ALTER TABLE "ApiKey" ADD CONSTRAINT "ApiKey_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- Rename indexes
ALTER TABLE "Project" RENAME CONSTRAINT "Organization_pkey" TO "Project_pkey";
ALTER TABLE "ProjectUser" RENAME CONSTRAINT "OrganizationUser_pkey" TO "ProjectUser_pkey";
ALTER TABLE "Project" RENAME CONSTRAINT "Organization_personalOrgUserId_fkey" TO "Project_personalProjectUserId_fkey";
ALTER INDEX "Organization_personalOrgUserId_key" RENAME TO "Project_personalProjectUserId_key";
ALTER INDEX "OrganizationUser_organizationId_userId_key" RENAME TO "ProjectUser_projectId_userId_key";


@@ -1 +0,0 @@
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";


@@ -1,66 +0,0 @@
/*
Warnings:
- You are about to rename the column `completionTokens` to `outputTokens` on the `ModelResponse` table.
- You are about to rename the column `promptTokens` to `inputTokens` on the `ModelResponse` table.
- You are about to rename the column `startTime` on the `LoggedCall` table to `requestedAt`. Ensure compatibility with application logic.
- You are about to rename the column `startTime` on the `LoggedCallModelResponse` table to `requestedAt`. Ensure compatibility with application logic.
- You are about to rename the column `endTime` on the `LoggedCallModelResponse` table to `receivedAt`. Ensure compatibility with application logic.
- You are about to rename the column `error` on the `LoggedCallModelResponse` table to `errorMessage`. Ensure compatibility with application logic.
- You are about to rename the column `respStatus` on the `LoggedCallModelResponse` table to `statusCode`. Ensure compatibility with application logic.
- You are about to rename the column `totalCost` on the `LoggedCallModelResponse` table to `cost`. Ensure compatibility with application logic.
- You are about to rename the column `inputHash` on the `ModelResponse` table to `cacheKey`. Ensure compatibility with application logic.
- You are about to rename the column `output` on the `ModelResponse` table to `respPayload`. Ensure compatibility with application logic.
*/
-- DropIndex
DROP INDEX "LoggedCall_startTime_idx";
-- DropIndex
DROP INDEX "ModelResponse_inputHash_idx";
-- Rename completionTokens to outputTokens
ALTER TABLE "ModelResponse"
RENAME COLUMN "completionTokens" TO "outputTokens";
-- Rename promptTokens to inputTokens
ALTER TABLE "ModelResponse"
RENAME COLUMN "promptTokens" TO "inputTokens";
-- AlterTable
ALTER TABLE "LoggedCall"
RENAME COLUMN "startTime" TO "requestedAt";
-- AlterTable
ALTER TABLE "LoggedCallModelResponse"
RENAME COLUMN "startTime" TO "requestedAt";
-- AlterTable
ALTER TABLE "LoggedCallModelResponse"
RENAME COLUMN "endTime" TO "receivedAt";
-- AlterTable
ALTER TABLE "LoggedCallModelResponse"
RENAME COLUMN "error" TO "errorMessage";
-- AlterTable
ALTER TABLE "LoggedCallModelResponse"
RENAME COLUMN "respStatus" TO "statusCode";
-- AlterTable
ALTER TABLE "LoggedCallModelResponse"
RENAME COLUMN "totalCost" TO "cost";
-- AlterTable
ALTER TABLE "ModelResponse"
RENAME COLUMN "inputHash" TO "cacheKey";
-- AlterTable
ALTER TABLE "ModelResponse"
RENAME COLUMN "output" TO "respPayload";
-- CreateIndex
CREATE INDEX "LoggedCall_requestedAt_idx" ON "LoggedCall"("requestedAt");
-- CreateIndex
CREATE INDEX "ModelResponse_cacheKey_idx" ON "ModelResponse"("cacheKey");


@@ -1,2 +0,0 @@
-- AlterTable
ALTER TABLE "LoggedCall" ADD COLUMN "model" TEXT;


@@ -1,22 +0,0 @@
-- DropIndex
DROP INDEX "LoggedCallTag_name_idx";
DROP INDEX "LoggedCallTag_name_value_idx";
-- AlterTable: Add projectId column without NOT NULL constraint for now
ALTER TABLE "LoggedCallTag" ADD COLUMN "projectId" UUID;
-- Set the default value
UPDATE "LoggedCallTag" lct
SET "projectId" = lc."projectId"
FROM "LoggedCall" lc
WHERE lct."loggedCallId" = lc.id;
-- Now set the NOT NULL constraint
ALTER TABLE "LoggedCallTag" ALTER COLUMN "projectId" SET NOT NULL;
-- CreateIndex
CREATE INDEX "LoggedCallTag_projectId_name_idx" ON "LoggedCallTag"("projectId", "name");
CREATE INDEX "LoggedCallTag_projectId_name_value_idx" ON "LoggedCallTag"("projectId", "name", "value");
-- CreateIndex
CREATE UNIQUE INDEX "LoggedCallTag_loggedCallId_name_key" ON "LoggedCallTag"("loggedCallId", "name");


@@ -1,25 +0,0 @@
-- CreateTable
CREATE TABLE "UserInvitation" (
"id" UUID NOT NULL,
"projectId" UUID NOT NULL,
"email" TEXT NOT NULL,
"role" "ProjectUserRole" NOT NULL,
"invitationToken" TEXT NOT NULL,
"senderId" UUID NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "UserInvitation_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "UserInvitation_invitationToken_key" ON "UserInvitation"("invitationToken");
-- CreateIndex
CREATE UNIQUE INDEX "UserInvitation_projectId_email_key" ON "UserInvitation"("projectId", "email");
-- AddForeignKey
ALTER TABLE "UserInvitation" ADD CONSTRAINT "UserInvitation_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "UserInvitation" ADD CONSTRAINT "UserInvitation_senderId_fkey" FOREIGN KEY ("senderId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;


@@ -1,88 +0,0 @@
/*
* Copyright 2023 Viascom Ltd liab. Co
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION nanoid(
size int DEFAULT 21,
alphabet text DEFAULT '_-0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
)
RETURNS text
LANGUAGE plpgsql
volatile
AS
$$
DECLARE
idBuilder text := '';
counter int := 0;
bytes bytea;
alphabetIndex int;
alphabetArray text[];
alphabetLength int;
mask int;
step int;
BEGIN
alphabetArray := regexp_split_to_array(alphabet, '');
alphabetLength := array_length(alphabetArray, 1);
mask := (2 << cast(floor(log(alphabetLength - 1) / log(2)) as int)) - 1;
step := cast(ceil(1.6 * mask * size / alphabetLength) AS int);
while true
loop
bytes := gen_random_bytes(step);
while counter < step
loop
alphabetIndex := (get_byte(bytes, counter) & mask) + 1;
if alphabetIndex <= alphabetLength then
idBuilder := idBuilder || alphabetArray[alphabetIndex];
if length(idBuilder) = size then
return idBuilder;
end if;
end if;
counter := counter + 1;
end loop;
counter := 0;
end loop;
END
$$;
-- Make a short_nanoid function that uses the default alphabet and length of 15
CREATE OR REPLACE FUNCTION short_nanoid()
RETURNS text
LANGUAGE plpgsql
volatile
AS
$$
BEGIN
RETURN nanoid(15, '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ');
END
$$;
-- AlterTable
ALTER TABLE "Experiment" ADD COLUMN "slug" TEXT NOT NULL DEFAULT short_nanoid();
-- For existing experiments, keep the existing id as the slug for backwards compatibility
UPDATE "Experiment" SET "slug" = "id";
-- CreateIndex
CREATE UNIQUE INDEX "Experiment_slug_key" ON "Experiment"("slug");


@@ -1,48 +0,0 @@
/*
Warnings:
- You are about to drop the column `input` on the `DatasetEntry` table. All the data in the column will be lost.
- You are about to drop the column `output` on the `DatasetEntry` table. All the data in the column will be lost.
- Added the required column `loggedCallId` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.
*/
-- AlterTable
ALTER TABLE "DatasetEntry" DROP COLUMN "input",
DROP COLUMN "output",
ADD COLUMN "loggedCallId" UUID NOT NULL;
-- AddForeignKey
ALTER TABLE "DatasetEntry" ADD CONSTRAINT "DatasetEntry_loggedCallId_fkey" FOREIGN KEY ("loggedCallId") REFERENCES "LoggedCall"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AlterTable
ALTER TABLE "LoggedCallModelResponse" ALTER COLUMN "cost" SET DATA TYPE DOUBLE PRECISION;
-- CreateEnum
CREATE TYPE "FineTuneStatus" AS ENUM ('PENDING', 'TRAINING', 'AWAITING_DEPLOYMENT', 'DEPLOYING', 'DEPLOYED', 'ERROR');
-- CreateTable
CREATE TABLE "FineTune" (
"id" UUID NOT NULL,
"slug" TEXT NOT NULL,
"baseModel" TEXT NOT NULL,
"status" "FineTuneStatus" NOT NULL DEFAULT 'PENDING',
"trainingStartedAt" TIMESTAMP(3),
"trainingFinishedAt" TIMESTAMP(3),
"deploymentStartedAt" TIMESTAMP(3),
"deploymentFinishedAt" TIMESTAMP(3),
"datasetId" UUID NOT NULL,
"projectId" UUID NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "FineTune_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "FineTune_slug_key" ON "FineTune"("slug");
-- AddForeignKey
ALTER TABLE "FineTune" ADD CONSTRAINT "FineTune_datasetId_fkey" FOREIGN KEY ("datasetId") REFERENCES "Dataset"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "FineTune" ADD CONSTRAINT "FineTune_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;


@@ -1,26 +0,0 @@
/*
Warnings:
- Added the required column `inputTokens` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.
- Added the required column `outputTokens` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.
- Added the required column `type` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.
*/
-- CreateEnum
CREATE TYPE "DatasetEntryType" AS ENUM ('TRAIN', 'TEST');
-- AlterTable
ALTER TABLE "Dataset" ADD COLUMN "trainingRatio" DOUBLE PRECISION NOT NULL DEFAULT 0.8;
-- AlterTable
ALTER TABLE "DatasetEntry" ADD COLUMN "input" JSONB NOT NULL DEFAULT '[]',
ADD COLUMN "inputTokens" INTEGER NOT NULL DEFAULT 0,
ADD COLUMN "output" JSONB,
ADD COLUMN "outputTokens" INTEGER NOT NULL DEFAULT 0,
ADD COLUMN "type" "DatasetEntryType" NOT NULL DEFAULT 'TRAIN';
-- CreateIndex
CREATE INDEX "DatasetEntry_datasetId_createdAt_id_idx" ON "DatasetEntry"("datasetId", "createdAt", "id");
-- CreateIndex
CREATE INDEX "DatasetEntry_datasetId_type_idx" ON "DatasetEntry"("datasetId", "type");

View File

@@ -1,5 +0,0 @@
-- AlterTable
ALTER TABLE "DatasetEntry" ALTER COLUMN "loggedCallId" DROP NOT NULL,
ALTER COLUMN "inputTokens" DROP DEFAULT,
ALTER COLUMN "outputTokens" DROP DEFAULT,
ALTER COLUMN "type" DROP DEFAULT;

View File

@@ -1,23 +0,0 @@
-- CreateEnum
CREATE TYPE "DatasetFileUploadStatus" AS ENUM ('PENDING', 'DOWNLOADING', 'PROCESSING', 'SAVING', 'COMPLETE', 'ERROR');
-- CreateTable
CREATE TABLE "DatasetFileUpload" (
"id" UUID NOT NULL,
"datasetId" UUID NOT NULL,
"blobName" TEXT NOT NULL,
"fileName" TEXT NOT NULL,
"fileSize" INTEGER NOT NULL,
"progress" INTEGER NOT NULL DEFAULT 0,
"status" "DatasetFileUploadStatus" NOT NULL DEFAULT 'PENDING',
"uploadedAt" TIMESTAMP(3) NOT NULL,
"visible" BOOLEAN NOT NULL DEFAULT true,
"errorMessage" TEXT,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "DatasetFileUpload_pkey" PRIMARY KEY ("id")
);
-- AddForeignKey
ALTER TABLE "DatasetFileUpload" ADD CONSTRAINT "DatasetFileUpload_datasetId_fkey" FOREIGN KEY ("datasetId") REFERENCES "Dataset"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -1,507 +0,0 @@
// This is your Prisma schema file,
// learn more about it in the docs: https://pris.ly/d/prisma-schema
generator client {
provider = "prisma-client-js"
}
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
model Experiment {
id String @id @default(uuid()) @db.Uuid
slug String @unique @default(dbgenerated("short_nanoid()"))
label String
sortIndex Int @default(0)
projectId String @db.Uuid
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
templateVariables TemplateVariable[]
promptVariants PromptVariant[]
testScenarios TestScenario[]
evaluations Evaluation[]
}
model PromptVariant {
id String @id @default(uuid()) @db.Uuid
label String
promptConstructor String
promptConstructorVersion Int
model String
modelProvider String
uiId String @default(uuid()) @db.Uuid
visible Boolean @default(true)
sortIndex Int @default(0)
experimentId String @db.Uuid
experiment Experiment @relation(fields: [experimentId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
scenarioVariantCells ScenarioVariantCell[]
@@index([uiId])
}
model TestScenario {
id String @id @default(uuid()) @db.Uuid
variableValues Json
uiId String @default(uuid()) @db.Uuid
visible Boolean @default(true)
sortIndex Int @default(0)
experimentId String @db.Uuid
experiment Experiment @relation(fields: [experimentId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
scenarioVariantCells ScenarioVariantCell[]
}
model TemplateVariable {
id String @id @default(uuid()) @db.Uuid
label String
experimentId String @db.Uuid
experiment Experiment @relation(fields: [experimentId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
enum CellRetrievalStatus {
PENDING
IN_PROGRESS
COMPLETE
ERROR
}
model ScenarioVariantCell {
id String @id @default(uuid()) @db.Uuid
retrievalStatus CellRetrievalStatus @default(COMPLETE)
jobQueuedAt DateTime?
jobStartedAt DateTime?
modelResponses ModelResponse[]
errorMessage String? // Contains errors that occurred independently of model responses
promptVariantId String @db.Uuid
promptVariant PromptVariant @relation(fields: [promptVariantId], references: [id], onDelete: Cascade)
prompt Json?
testScenarioId String @db.Uuid
testScenario TestScenario @relation(fields: [testScenarioId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([promptVariantId, testScenarioId])
}
model ModelResponse {
id String @id @default(uuid()) @db.Uuid
cacheKey String
requestedAt DateTime?
receivedAt DateTime?
respPayload Json?
cost Float?
inputTokens Int?
outputTokens Int?
statusCode Int?
errorMessage String?
retryTime DateTime?
outdated Boolean @default(false)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
scenarioVariantCellId String @db.Uuid
scenarioVariantCell ScenarioVariantCell @relation(fields: [scenarioVariantCellId], references: [id], onDelete: Cascade)
outputEvaluations OutputEvaluation[]
@@index([cacheKey])
}
enum EvalType {
CONTAINS
DOES_NOT_CONTAIN
GPT4_EVAL
}
model Evaluation {
id String @id @default(uuid()) @db.Uuid
label String
evalType EvalType
value String
experimentId String @db.Uuid
experiment Experiment @relation(fields: [experimentId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
outputEvaluations OutputEvaluation[]
}
model OutputEvaluation {
id String @id @default(uuid()) @db.Uuid
// Number between 0 (fail) and 1 (pass)
result Float
details String?
modelResponseId String @db.Uuid
modelResponse ModelResponse @relation(fields: [modelResponseId], references: [id], onDelete: Cascade)
evaluationId String @db.Uuid
evaluation Evaluation @relation(fields: [evaluationId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([modelResponseId, evaluationId])
}
enum DatasetFileUploadStatus {
PENDING
DOWNLOADING
PROCESSING
SAVING
COMPLETE
ERROR
}
model DatasetFileUpload {
id String @id @default(uuid()) @db.Uuid
datasetId String @db.Uuid
dataset Dataset @relation(fields: [datasetId], references: [id], onDelete: Cascade)
blobName String
fileName String
fileSize Int
progress Int @default(0) // Percentage
status DatasetFileUploadStatus @default(PENDING)
uploadedAt DateTime
visible Boolean @default(true)
errorMessage String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
model Dataset {
id String @id @default(uuid()) @db.Uuid
name String
datasetEntries DatasetEntry[]
fineTunes FineTune[]
datasetFileUploads DatasetFileUpload[]
trainingRatio Float @default(0.8)
projectId String @db.Uuid
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
enum DatasetEntryType {
TRAIN
TEST
}
model DatasetEntry {
id String @id @default(uuid()) @db.Uuid
loggedCallId String? @db.Uuid
loggedCall LoggedCall? @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)
input Json @default("[]")
output Json?
inputTokens Int
outputTokens Int
type DatasetEntryType
datasetId String @db.Uuid
dataset Dataset? @relation(fields: [datasetId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@index([datasetId, createdAt, id])
@@index([datasetId, type])
}
model Project {
id String @id @default(uuid()) @db.Uuid
name String @default("Project 1")
personalProjectUserId String? @unique @db.Uuid
personalProjectUser User? @relation(fields: [personalProjectUserId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
projectUsers ProjectUser[]
projectUserInvitations UserInvitation[]
experiments Experiment[]
datasets Dataset[]
loggedCalls LoggedCall[]
fineTunes FineTune[]
apiKeys ApiKey[]
}
enum ProjectUserRole {
ADMIN
MEMBER
VIEWER
}
model ProjectUser {
id String @id @default(uuid()) @db.Uuid
role ProjectUserRole
projectId String @db.Uuid
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
userId String @db.Uuid
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([projectId, userId])
}
model WorldChampEntrant {
id String @id @default(uuid()) @db.Uuid
userId String @db.Uuid
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
approved Boolean @default(false)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([userId])
}
model LoggedCall {
id String @id @default(uuid()) @db.Uuid
requestedAt DateTime
// True if this call was served from the cache, false otherwise
cacheHit Boolean
// A LoggedCall is always associated with a LoggedCallModelResponse. If this
// is a cache miss, we create a new LoggedCallModelResponse.
// If it's a cache hit, it's a pre-existing LoggedCallModelResponse.
modelResponseId String? @db.Uuid
modelResponse LoggedCallModelResponse? @relation(fields: [modelResponseId], references: [id], onDelete: Cascade)
// The responses created by this LoggedCall. Will be empty if this LoggedCall was a cache hit.
createdResponses LoggedCallModelResponse[] @relation(name: "ModelResponseOriginalCall")
projectId String @db.Uuid
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
model String?
tags LoggedCallTag[]
datasetEntries DatasetEntry[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@index([requestedAt])
}
model LoggedCallModelResponse {
id String @id @default(uuid()) @db.Uuid
reqPayload Json
// The HTTP status returned by the model provider
statusCode Int?
respPayload Json?
// Should be null if the request was successful, and some string if the request failed.
errorMessage String?
requestedAt DateTime
receivedAt DateTime
// Note: the function to calculate the cacheKey should include the project
// ID so we don't share cached responses between projects, which could be an
// attack vector. Also, we should only set the cacheKey on the model if the
// request was successful.
cacheKey String?
// Derived fields
durationMs Int?
inputTokens Int?
outputTokens Int?
finishReason String?
completionId String?
cost Float?
// The LoggedCall that created this LoggedCallModelResponse
originalLoggedCallId String @unique @db.Uuid
originalLoggedCall LoggedCall @relation(name: "ModelResponseOriginalCall", fields: [originalLoggedCallId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
loggedCalls LoggedCall[]
@@index([cacheKey])
}
model LoggedCallTag {
id String @id @default(uuid()) @db.Uuid
name String
value String?
projectId String @db.Uuid
loggedCallId String @db.Uuid
loggedCall LoggedCall @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)
@@unique([loggedCallId, name])
@@index([projectId, name])
@@index([projectId, name, value])
}
model ApiKey {
id String @id @default(uuid()) @db.Uuid
name String
apiKey String @unique
projectId String @db.Uuid
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
model Account {
id String @id @default(uuid()) @db.Uuid
userId String @db.Uuid
type String
provider String
providerAccountId String
refresh_token String? @db.Text
refresh_token_expires_in Int?
access_token String? @db.Text
expires_at Int?
token_type String?
scope String?
id_token String? @db.Text
session_state String?
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@unique([provider, providerAccountId])
}
model Session {
id String @id @default(uuid()) @db.Uuid
sessionToken String @unique
userId String @db.Uuid
expires DateTime
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
}
enum UserRole {
ADMIN
USER
}
model User {
id String @id @default(uuid()) @db.Uuid
name String?
email String? @unique
emailVerified DateTime?
image String?
role UserRole @default(USER)
accounts Account[]
sessions Session[]
projectUsers ProjectUser[]
projects Project[]
worldChampEntrant WorldChampEntrant?
sentUserInvitations UserInvitation[]
createdAt DateTime @default(now())
updatedAt DateTime @default(now()) @updatedAt
}
model UserInvitation {
id String @id @default(uuid()) @db.Uuid
projectId String @db.Uuid
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
email String
role ProjectUserRole
invitationToken String @unique
senderId String @db.Uuid
sender User @relation(fields: [senderId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([projectId, email])
}
model VerificationToken {
identifier String
token String @unique
expires DateTime
@@unique([identifier, token])
}
enum FineTuneStatus {
PENDING
TRAINING
AWAITING_DEPLOYMENT
DEPLOYING
DEPLOYED
ERROR
}
model FineTune {
id String @id @default(uuid()) @db.Uuid
slug String @unique
baseModel String
status FineTuneStatus @default(PENDING)
trainingStartedAt DateTime?
trainingFinishedAt DateTime?
deploymentStartedAt DateTime?
deploymentFinishedAt DateTime?
datasetId String @db.Uuid
dataset Dataset @relation(fields: [datasetId], references: [id], onDelete: Cascade)
projectId String @db.Uuid
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
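The cacheKey comment on LoggedCallModelResponse above calls for a key scoped to the project; here is a minimal sketch of such a function, assuming a SHA-256 hash over the project ID and request payload (the repository's actual implementation is not shown in this diff).
import { createHash } from "crypto";
// Hypothetical helper: scoping the key to the project keeps cached responses
// from leaking between projects; only successful requests should be cached.
function buildCacheKey(projectId: string, reqPayload: unknown): string {
  return createHash("sha256")
    .update(projectId)
    .update(JSON.stringify(reqPayload))
    .digest("hex");
}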

View File

@@ -1,127 +0,0 @@
import { prisma } from "~/server/db";
import dedent from "dedent";
import { execSync } from "child_process";
import fs from "fs";
import { promptConstructorVersion } from "~/promptConstructor/version";
const defaultId = "11111111-1111-1111-1111-111111111112";
await prisma.project.deleteMany({
where: { id: defaultId },
});
// If there's an existing project, just seed into it
const project =
(await prisma.project.findFirst({})) ??
(await prisma.project.create({
data: { id: defaultId },
}));
// Clone the repo from git@github.com:microsoft/AGIEval.git into a tmp dir if it doesn't exist
const tmpDir = "/tmp/agi-eval";
if (!fs.existsSync(tmpDir)) {
execSync(`git clone git@github.com:microsoft/AGIEval.git ${tmpDir}`);
}
const datasets = [
"sat-en",
"sat-math",
"lsat-rc",
"lsat-ar",
"aqua-rat",
"logiqa-en",
"lsat-lr",
"math",
];
type Scenario = {
passage: string | null;
question: string;
options: string[] | null;
label: string;
};
for (const dataset of datasets) {
const experimentName = `AGI-Eval: ${dataset}`;
const oldExperiment = await prisma.experiment.findFirst({
where: {
label: experimentName,
projectId: project.id,
},
});
if (oldExperiment) {
await prisma.experiment.deleteMany({
where: { id: oldExperiment.id },
});
}
const experiment = await prisma.experiment.create({
data: {
id: oldExperiment?.id ?? undefined,
label: experimentName,
projectId: project.id,
},
});
const scenarios: Scenario[] = fs
.readFileSync(`${tmpDir}/data/v1/${dataset}.jsonl`, "utf8")
.split("\n")
.filter((line) => line.length > 0)
.map((line) => JSON.parse(line) as Scenario);
console.log("scenarios", scenarios.length);
await prisma.testScenario.createMany({
data: scenarios.slice(0, 30).map((scenario, i) => ({
experimentId: experiment.id,
sortIndex: i,
variableValues: {
passage: scenario.passage,
question: scenario.question,
options: scenario.options?.join("\n"),
label: scenario.label,
},
})),
});
await prisma.templateVariable.createMany({
data: ["passage", "question", "options", "label"].map((label) => ({
experimentId: experiment.id,
label,
})),
});
await prisma.promptVariant.createMany({
data: [
{
experimentId: experiment.id,
label: "Prompt Variant 1",
sortIndex: 0,
model: "gpt-3.5-turbo-0613",
modelProvider: "openai/ChatCompletion",
promptConstructorVersion,
promptConstructor: dedent`
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo-0613",
messages: [
{
role: "user",
content: \`Passage: ${"$"}{scenario.passage}\n\nQuestion: ${"$"}{scenario.question}\n\nOptions: ${"$"}{scenario.options}\n\n Respond with just the letter of the best option in the format Answer: (A).\`
}
],
temperature: 0,
})`,
},
],
});
await prisma.evaluation.createMany({
data: [
{
experimentId: experiment.id,
label: "Eval",
evalType: "CONTAINS",
value: "Answer: ({{label}})",
},
],
});
}

File diff suppressed because one or more lines are too long

View File

@@ -1,114 +0,0 @@
import { prisma } from "~/server/db";
import dedent from "dedent";
import fs from "fs";
import { parse } from "csv-parse/sync";
import { promptConstructorVersion } from "~/promptConstructor/version";
const defaultId = "11111111-1111-1111-1111-111111111112";
await prisma.project.deleteMany({
where: { id: defaultId },
});
// If there's an existing project, just seed into it
const project =
(await prisma.project.findFirst({})) ??
(await prisma.project.create({
data: { id: defaultId },
}));
type Scenario = {
text: string;
sentiment: string;
emotion: string;
};
const experimentName = `Twitter Sentiment Analysis`;
const oldExperiment = await prisma.experiment.findFirst({
where: {
label: experimentName,
projectId: project.id,
},
});
if (oldExperiment) {
await prisma.experiment.deleteMany({
where: { id: oldExperiment.id },
});
}
const experiment = await prisma.experiment.create({
data: {
id: oldExperiment?.id ?? undefined,
label: experimentName,
projectId: project.id,
},
});
const content = fs.readFileSync("./prisma/datasets/validated_tweets.csv", "utf8");
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const records: any[] = parse(content, { delimiter: ",", from_line: 2 });
console.log("records", records);
const scenarios: Scenario[] = records.map((row) => ({
text: row[0],
sentiment: row[1],
emotion: row[2],
}));
console.log("scenarios", scenarios.length);
await prisma.testScenario.createMany({
data: scenarios.slice(0, 30).map((scenario, i) => ({
experimentId: experiment.id,
sortIndex: i,
variableValues: {
text: scenario.text,
sentiment: scenario.sentiment,
emotion: scenario.emotion,
},
})),
});
await prisma.templateVariable.createMany({
data: ["text", "sentiment", "emotion"].map((label) => ({
experimentId: experiment.id,
label,
})),
});
await prisma.promptVariant.createMany({
data: [
{
experimentId: experiment.id,
label: "Prompt Variant 1",
sortIndex: 0,
model: "gpt-3.5-turbo-0613",
modelProvider: "openai/ChatCompletion",
promptConstructorVersion,
promptConstructor: dedent`
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo-0613",
messages: [
{
role: "user",
content: \`Text: ${"$"}{scenario.text}\n\nRespond with the sentiment (negative|neutral|positive) and emotion (optimism|joy|anger|sadness) of the tweet in this format: "answer: <sentiment>-<emotion>".\`
}
],
temperature: 0,
})`,
},
],
});
await prisma.evaluation.createMany({
data: [
{
experimentId: experiment.id,
label: "Eval",
evalType: "CONTAINS",
value: "answer: {{sentiment}}-{{emotion}}",
},
],
});

Binary file not shown (before: 15 KiB).
Binary file not shown (before: 6.1 KiB).
Binary file not shown (before: 49 KiB).
Binary file not shown (before: 5.3 KiB).
Binary file not shown (before: 800 B).
Binary file not shown (before: 1.3 KiB).
Binary file not shown (before: 15 KiB).
Binary file not shown (before: 3.4 KiB).

View File

@@ -1,28 +0,0 @@
<svg width="398" height="550" viewBox="0 0 398 550" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M39 125H359V542C359 546.418 355.418 550 351 550H47C42.5817 550 39 546.418 39 542V125Z" fill="black"/>
<path d="M0 8C0 3.58172 3.58172 0 8 0H390C394.418 0 398 3.58172 398 8V127C398 131.418 394.418 135 390 135H7.99999C3.58171 135 0 131.418 0 127V8Z" fill="black"/>
<path d="M50 135H348V535C348 537.209 346.209 539 344 539H54C51.7909 539 50 537.209 50 535V135Z" fill="#FF5733"/>
<path d="M11 14.0001C11 11.791 12.7909 10.0001 15 10.0001H384C386.209 10.0001 388 11.791 388 14.0001V120C388 122.209 386.209 124 384 124H15C12.7909 124 11 122.209 11 120V14.0001Z" fill="#FF5733"/>
<path d="M11 14.0001C11 11.791 12.7909 10.0001 15 10.0001H384C386.209 10.0001 388 11.791 388 14.0001V120C388 122.209 386.209 124 384 124H15C12.7909 124 11 122.209 11 120V14.0001Z" fill="url(#paint0_linear_102_49)"/>
<path d="M50 134H348V535C348 537.209 346.209 539 344 539H54C51.7909 539 50 537.209 50 535V134Z" fill="url(#paint1_linear_102_49)"/>
<path d="M108 142H156V535H108V142Z" fill="white"/>
<path d="M300 135H348V535C348 537.209 346.209 539 344 539H300V135Z" fill="white" fill-opacity="0.25"/>
<path d="M96 142H108V535H96V142Z" fill="white" fill-opacity="0.5"/>
<path d="M84 10.0001H133V120H84V10.0001Z" fill="white"/>
<path d="M339 10.0001H384C386.209 10.0001 388 11.791 388 14.0001V120C388 122.209 386.209 124 384 124H339V10.0001Z" fill="white" fill-opacity="0.25"/>
<path d="M71.9995 10.0001H83.9995V120H71.9995V10.0001Z" fill="white" fill-opacity="0.5"/>
<path d="M108 534.529H156V539.019H108V534.529Z" fill="#AAAAAA"/>
<path opacity="0.5" d="M95.9927 534.529H107.982V539.019H95.9927V534.529Z" fill="#AAAAAA"/>
<path d="M84.0029 119.887H133.007V124.027H84.0029V119.887Z" fill="#AAAAAA"/>
<path opacity="0.5" d="M71.9883 119.887H83.978V124.027H71.9883V119.887Z" fill="#AAAAAA"/>
<defs>
<linearGradient id="paint0_linear_102_49" x1="335" y1="67.0001" x2="137" y2="67.0001" gradientUnits="userSpaceOnUse">
<stop stop-color="#D62600"/>
<stop offset="1" stop-color="#FF5733" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint1_linear_102_49" x1="306.106" y1="336.5" x2="149.597" y2="336.5" gradientUnits="userSpaceOnUse">
<stop stop-color="#D62600"/>
<stop offset="1" stop-color="#FF5733" stop-opacity="0"/>
</linearGradient>
</defs>
</svg>

Before: 2.3 KiB
Binary file not shown (before: 26 KiB).

View File

@@ -1,6 +0,0 @@
#! /bin/bash
set -e
cd "$(dirname "$0")/.."
apt-get update
apt-get install -y htop postgresql-client

View File

@@ -1,10 +0,0 @@
#! /bin/bash
set -e
echo "Migrating the database"
pnpm prisma migrate deploy
echo "Starting 4 workers"
pnpm concurrently "pnpm worker" "pnpm worker" "pnpm worker" "pnpm worker"

View File

@@ -1,13 +0,0 @@
#! /bin/bash
set -e
cd "$(dirname "$0")/../.."
echo "Env is"
echo $ENVIRONMENT
docker build . --file app/Dockerfile --tag "openpipe-prod"
# Run the image
docker run --env-file app/.env -it --entrypoint "/bin/bash" "openpipe-prod"

View File

@@ -1,33 +0,0 @@
// This file configures the initialization of Sentry on the client.
// The config you add here will be used whenever a user loads a page in their browser.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/
import * as Sentry from "@sentry/nextjs";
import { env } from "~/env.mjs";
if (env.NEXT_PUBLIC_SENTRY_DSN) {
Sentry.init({
dsn: env.NEXT_PUBLIC_SENTRY_DSN,
// Adjust this value in production, or use tracesSampler for greater control
tracesSampleRate: 1,
// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
replaysOnErrorSampleRate: 1.0,
// This sets the replay session sample rate to 10%. You may want 100% while
// developing and a lower rate in production
replaysSessionSampleRate: 0.1,
// You can remove this option if you're not planning to use the Sentry Session Replay feature:
integrations: [
new Sentry.Replay({
// Additional Replay configuration goes in here, for example:
maskAllText: true,
blockAllMedia: true,
}),
],
});
}

View File

@@ -1,19 +0,0 @@
// This file configures the initialization of Sentry for edge features (middleware, edge routes, and so on).
// The config you add here will be used whenever one of the edge features is loaded.
// Note that this config is unrelated to the Vercel Edge Runtime and is also required when running locally.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/
import * as Sentry from "@sentry/nextjs";
import { env } from "~/env.mjs";
if (env.NEXT_PUBLIC_SENTRY_DSN) {
Sentry.init({
dsn: env.NEXT_PUBLIC_SENTRY_DSN,
// Adjust this value in production, or use tracesSampler for greater control
tracesSampleRate: 1,
// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
});
}

View File

@@ -1,25 +0,0 @@
// This file configures the initialization of Sentry on the server.
// The config you add here will be used whenever the server handles a request.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/
import * as Sentry from "@sentry/nextjs";
import { isError } from "lodash-es";
import { env } from "~/env.mjs";
if (env.NEXT_PUBLIC_SENTRY_DSN) {
Sentry.init({
dsn: env.NEXT_PUBLIC_SENTRY_DSN,
// Adjust this value in production, or use tracesSampler for greater control
tracesSampleRate: 1,
// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
});
} else {
// Install local debug exception handler for rejected promises
process.on("unhandledRejection", (reason) => {
const reasonDetails = isError(reason) ? reason?.stack : reason;
console.log("Unhandled Rejection at:", reasonDetails);
});
}

View File

@@ -1,55 +0,0 @@
import { useState } from "react";
import { Button, HStack, type ButtonProps, Icon, Text } from "@chakra-ui/react";
import { type IconType } from "react-icons";
import { useAppStore } from "~/state/store";
import { BetaModal } from "./BetaModal";
const ActionButton = ({
icon,
iconBoxSize = 3.5,
label,
requireBeta = false,
onClick,
...buttonProps
}: {
icon: IconType;
iconBoxSize?: number;
label: string;
requireBeta?: boolean;
onClick?: () => void;
} & ButtonProps) => {
const flags = useAppStore((s) => s.featureFlags.featureFlags);
const flagsLoaded = useAppStore((s) => s.featureFlags.flagsLoaded);
const [betaModalOpen, setBetaModalOpen] = useState(false);
const isBetaBlocked = requireBeta && flagsLoaded && !flags.betaAccess;
return (
<>
<Button
colorScheme="blue"
color="black"
bgColor="white"
borderColor="gray.300"
borderRadius={4}
variant="outline"
size="sm"
fontSize="sm"
fontWeight="normal"
onClick={isBetaBlocked ? () => setBetaModalOpen(true) : onClick}
{...buttonProps}
>
<HStack spacing={1}>
{icon && (
<Icon as={icon} boxSize={iconBoxSize} color={requireBeta ? "orange.400" : undefined} />
)}
<Text display={{ base: "none", md: "flex" }}>{label}</Text>
</HStack>
</Button>
<BetaModal isOpen={betaModalOpen} onClose={() => setBetaModalOpen(false)} />
</>
);
};
export default ActionButton;
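A usage sketch for ActionButton; the icon, import path, and click handler are placeholders rather than code from this diff.
import { BsPlusSquare } from "react-icons/bs";
import ActionButton from "./ActionButton";
const NewExperimentButton = () => (
  <ActionButton
    icon={BsPlusSquare}
    label="New Experiment"
    requireBeta={false}
    onClick={() => console.log("new experiment")}
  />
);
export default NewExperimentButton;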

View File

@@ -1,65 +0,0 @@
import {
Button,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
ModalOverlay,
VStack,
Text,
HStack,
Icon,
Link,
} from "@chakra-ui/react";
import { BsStars } from "react-icons/bs";
import { useSession } from "next-auth/react";
export const BetaModal = ({ isOpen, onClose }: { isOpen: boolean; onClose: () => void }) => {
const session = useSession();
const email = session.data?.user.email ?? "";
return (
<Modal
isOpen={isOpen}
onClose={onClose}
closeOnOverlayClick={false}
size={{ base: "xl", md: "2xl" }}
>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader>
<HStack>
<Icon as={BsStars} />
<Text>Beta-Only Feature</Text>
</HStack>
</ModalHeader>
<ModalBody maxW="unset">
<VStack spacing={8} py={4} alignItems="flex-start">
<Text fontSize="md">
This feature is currently in beta. To receive early access to beta-only features, join
the waitlist. You'll receive an email at <b>{email}</b> when you're approved.
</Text>
</VStack>
</ModalBody>
<ModalFooter>
<HStack spacing={4}>
<Button
as={Link}
textDecoration="none !important"
colorScheme="orange"
target="_blank"
href={`https://ax3nafkw0jp.typeform.com/to/ZNpYqvAc#email=${email}`}
>
Join Waitlist
</Button>
<Button colorScheme="blue" onClick={onClose}>
Done
</Button>
</HStack>
</ModalFooter>
</ModalContent>
</Modal>
);
};

View File

@@ -1,36 +0,0 @@
import { Text, VStack } from "@chakra-ui/react";
import { type LegacyRef } from "react";
import Select from "react-select";
import { useElementDimensions } from "~/utils/hooks";
import { flatMap } from "lodash-es";
import frontendModelProviders from "~/modelProviders/frontendModelProviders";
import { type ProviderModel } from "~/modelProviders/types";
import { modelLabel } from "~/utils/utils";
const modelOptions = flatMap(Object.entries(frontendModelProviders), ([providerId, provider]) =>
Object.entries(provider.models).map(([modelId]) => ({
provider: providerId,
model: modelId,
})),
) as ProviderModel[];
export const ModelSearch = (props: {
selectedModel: ProviderModel;
setSelectedModel: (model: ProviderModel) => void;
}) => {
const [containerRef, containerDimensions] = useElementDimensions();
return (
<VStack ref={containerRef as LegacyRef<HTMLDivElement>} w="full" fontFamily="inconsolata">
<Text fontWeight="bold">Browse Models</Text>
<Select<ProviderModel>
styles={{ control: (provided) => ({ ...provided, width: containerDimensions?.width }) }}
getOptionLabel={(data) => modelLabel(data.provider, data.model)}
getOptionValue={(data) => modelLabel(data.provider, data.model)}
options={modelOptions}
onChange={(option) => option && props.setSelectedModel(option)}
/>
</VStack>
);
};

View File

@@ -1,51 +0,0 @@
import { HStack, Icon, IconButton, Tooltip, Text, type StackProps } from "@chakra-ui/react";
import { useState } from "react";
import { MdContentCopy } from "react-icons/md";
import { useHandledAsyncCallback } from "~/utils/hooks";
const CopiableCode = ({ code, ...rest }: { code: string } & StackProps) => {
const [copied, setCopied] = useState(false);
const [copyToClipboard] = useHandledAsyncCallback(async () => {
await navigator.clipboard.writeText(code);
setCopied(true);
}, [code]);
return (
<HStack
backgroundColor="blackAlpha.800"
color="white"
borderRadius={4}
padding={3}
w="full"
justifyContent="space-between"
alignItems="flex-start"
{...rest}
>
<Text
fontFamily="inconsolata"
fontWeight="bold"
letterSpacing={0.5}
overflowX="auto"
whiteSpace="pre-wrap"
>
{code}
{/* Necessary for trailing newline to actually be displayed */}
{code.endsWith("\n") ? "\n" : ""}
</Text>
<Tooltip closeOnClick={false} label={copied ? "Copied!" : "Copy to clipboard"}>
<IconButton
aria-label="Copy"
icon={<Icon as={MdContentCopy} boxSize={5} />}
size="xs"
colorScheme="white"
variant="ghost"
onClick={copyToClipboard}
onMouseLeave={() => setCopied(false)}
/>
</Tooltip>
</HStack>
);
};
export default CopiableCode;
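A usage sketch for CopiableCode; the snippet content and import path are placeholders.
import CopiableCode from "./CopiableCode";
const InstallInstructions = () => <CopiableCode code={"pnpm install\npnpm dev\n"} />;
export default InstallInstructions;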

View File

@@ -1,55 +0,0 @@
import { Box, IconButton, useToast } from "@chakra-ui/react";
import { CopyIcon } from "lucide-react";
import SyntaxHighlighter from "react-syntax-highlighter";
import { atelierCaveLight } from "react-syntax-highlighter/dist/cjs/styles/hljs";
import stringify from "json-stringify-pretty-compact";
const FormattedJson = ({ json }: { json: any }) => {
const jsonString = stringify(json, { maxLength: 40 });
const toast = useToast();
const copyToClipboard = async (text: string) => {
try {
await navigator.clipboard.writeText(text);
toast({
title: "Copied to clipboard",
status: "success",
duration: 2000,
});
} catch (err) {
toast({
title: "Failed to copy to clipboard",
status: "error",
duration: 2000,
});
}
};
return (
<Box position="relative" fontSize="sm" borderRadius="md" overflow="hidden">
<SyntaxHighlighter
customStyle={{ overflowX: "unset" }}
language="json"
style={atelierCaveLight}
lineProps={{
style: { wordBreak: "break-all", whiteSpace: "pre-wrap" },
}}
wrapLines
>
{jsonString}
</SyntaxHighlighter>
<IconButton
aria-label="Copy"
icon={<CopyIcon />}
position="absolute"
top={1}
right={1}
size="xs"
variant="ghost"
onClick={() => void copyToClipboard(jsonString)}
/>
</Box>
);
};
export { FormattedJson };

View File

@@ -1,14 +0,0 @@
import { Tooltip, Icon, VStack } from "@chakra-ui/react";
import { RiInformationFill } from "react-icons/ri";
const InfoCircle = ({ tooltipText }: { tooltipText: string }) => {
return (
<Tooltip label={tooltipText} fontSize="sm" shouldWrapChildren maxW={80}>
<VStack>
<Icon as={RiInformationFill} boxSize={5} color="gray.500" />
</VStack>
</Tooltip>
);
};
export default InfoCircle;

View File

@@ -1,100 +0,0 @@
import {
Input,
InputGroup,
InputRightElement,
Icon,
Popover,
PopoverTrigger,
PopoverContent,
VStack,
HStack,
Button,
Text,
useDisclosure,
type InputGroupProps,
} from "@chakra-ui/react";
import { FiChevronDown } from "react-icons/fi";
import { BiCheck } from "react-icons/bi";
import { isEqual } from "lodash-es";
import React from "react";
type InputDropdownProps<T> = {
options: ReadonlyArray<T>;
selectedOption: T;
onSelect: (option: T) => void;
inputGroupProps?: InputGroupProps;
getDisplayLabel?: (option: T) => string;
isDisabled?: boolean;
};
const InputDropdown = <T,>({
options,
selectedOption,
onSelect,
inputGroupProps,
getDisplayLabel = (option) => option as string,
isDisabled,
}: InputDropdownProps<T>) => {
const { onOpen, ...popover } = useDisclosure();
return (
<Popover placement="bottom-start" onOpen={isDisabled ? undefined : onOpen} {...popover}>
<PopoverTrigger>
<InputGroup
cursor="pointer"
w={getDisplayLabel(selectedOption).length * 14 + 180}
{...inputGroupProps}
>
<Input
value={getDisplayLabel(selectedOption)}
// eslint-disable-next-line @typescript-eslint/no-empty-function -- controlled input requires onChange
onChange={() => {}}
cursor="pointer"
borderColor={popover.isOpen ? "blue.500" : undefined}
_hover={popover.isOpen ? { borderColor: "blue.500" } : undefined}
contentEditable={false}
// disable focus
onFocus={(e) => {
e.target.blur();
}}
isDisabled={isDisabled}
/>
<InputRightElement>
<Icon as={FiChevronDown} color={isDisabled ? "gray.300" : undefined} />
</InputRightElement>
</InputGroup>
</PopoverTrigger>
<PopoverContent boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);" minW={0} w="auto">
<VStack spacing={0}>
{options?.map((option, index) => (
<HStack
key={index}
as={Button}
onClick={() => {
onSelect(option);
popover.onClose();
}}
w="full"
variant="ghost"
justifyContent="space-between"
fontWeight="semibold"
borderRadius={0}
colorScheme="blue"
color="black"
fontSize="sm"
borderBottomWidth={1}
>
<Text mr={16}>{getDisplayLabel(option)}</Text>
{isEqual(option, selectedOption) && (
<Icon as={BiCheck} color="blue.500" boxSize={5} />
)}
</HStack>
))}
</VStack>
</PopoverContent>
</Popover>
);
};
export default InputDropdown;
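A usage sketch showing the generic parameter in action; the option values and import path are placeholders.
import { useState } from "react";
import InputDropdown from "./InputDropdown";
const baseModels = ["gpt-3.5-turbo", "gpt-4"] as const;
const BaseModelPicker = () => {
  const [baseModel, setBaseModel] = useState<(typeof baseModels)[number]>(baseModels[0]);
  return (
    <InputDropdown options={baseModels} selectedOption={baseModel} onSelect={setBaseModel} />
  );
};
export default BaseModelPicker;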

View File

@@ -1,53 +0,0 @@
import { HStack, Icon, IconButton, Spinner, Tooltip, useDisclosure } from "@chakra-ui/react";
import { BsArrowClockwise, BsInfoCircle } from "react-icons/bs";
import { useExperimentAccess } from "~/utils/hooks";
import PromptModal from "./PromptModal";
import { type RouterOutputs } from "~/utils/api";
export const CellOptions = ({
cell,
refetchingOutput,
refetchOutput,
}: {
cell: RouterOutputs["scenarioVariantCells"]["get"];
refetchingOutput: boolean;
refetchOutput: () => void;
}) => {
const { canModify } = useExperimentAccess();
const modalDisclosure = useDisclosure();
return (
<HStack justifyContent="flex-end" w="full" spacing={1}>
{cell && (
<>
<Tooltip label="See Prompt">
<IconButton
aria-label="See Prompt"
icon={<Icon as={BsInfoCircle} boxSize={3.5} />}
onClick={modalDisclosure.onOpen}
size="xs"
colorScheme="gray"
color="gray.500"
variant="ghost"
/>
</Tooltip>
<PromptModal cell={cell} disclosure={modalDisclosure} />
</>
)}
{canModify && (
<Tooltip label="Refetch output">
<IconButton
size="xs"
color="gray.500"
variant="ghost"
cursor="pointer"
onClick={refetchOutput}
aria-label="refetch output"
icon={<Icon as={refetchingOutput ? Spinner : BsArrowClockwise} boxSize={4} />}
/>
</Tooltip>
)}
</HStack>
);
};

View File

@@ -1,29 +0,0 @@
import { type StackProps, VStack } from "@chakra-ui/react";
import { type RouterOutputs } from "~/utils/api";
import { type Scenario } from "../types";
import { CellOptions } from "./CellOptions";
import { OutputStats } from "./OutputStats";
const CellWrapper: React.FC<
StackProps & {
cell: RouterOutputs["scenarioVariantCells"]["get"] | undefined;
hardRefetching: boolean;
hardRefetch: () => void;
mostRecentResponse:
| NonNullable<RouterOutputs["scenarioVariantCells"]["get"]>["modelResponses"][0]
| undefined;
scenario: Scenario;
}
> = ({ children, cell, hardRefetching, hardRefetch, mostRecentResponse, scenario, ...props }) => (
<VStack w="full" alignItems="flex-start" {...props} px={2} py={2} h="100%">
{cell && (
<CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} cell={cell} />
)}
<VStack w="full" alignItems="flex-start" maxH={500} overflowY="auto" flex={1}>
{children}
</VStack>
{mostRecentResponse && <OutputStats modelResponse={mostRecentResponse} scenario={scenario} />}
</VStack>
);
export default CellWrapper;

View File

@@ -1,196 +0,0 @@
import { Text } from "@chakra-ui/react";
import stringify from "json-stringify-pretty-compact";
import { Fragment, useEffect, useState, type ReactElement } from "react";
import SyntaxHighlighter from "react-syntax-highlighter";
import { docco } from "react-syntax-highlighter/dist/cjs/styles/hljs";
import frontendModelProviders from "~/modelProviders/frontendModelProviders";
import { api } from "~/utils/api";
import { useHandledAsyncCallback, useScenarioVars } from "~/utils/hooks";
import useSocket from "~/utils/useSocket";
import { type PromptVariant, type Scenario } from "../types";
import CellWrapper from "./CellWrapper";
import { ResponseLog } from "./ResponseLog";
import { RetryCountdown } from "./RetryCountdown";
const WAITING_MESSAGE_INTERVAL = 20000;
export default function OutputCell({
scenario,
variant,
}: {
scenario: Scenario;
variant: PromptVariant;
}): ReactElement | null {
const utils = api.useContext();
const vars = useScenarioVars().data;
const scenarioVariables = scenario.variableValues as Record<string, string>;
const templateHasVariables =
vars?.length === 0 || vars?.some((v) => scenarioVariables[v.label] !== undefined);
let disabledReason: string | null = null;
if (!templateHasVariables) disabledReason = "Add a value to the scenario variables to see output";
const [refetchInterval, setRefetchInterval] = useState<number | false>(false);
const { data: cell, isLoading: queryLoading } = api.scenarioVariantCells.get.useQuery(
{ scenarioId: scenario.id, variantId: variant.id },
{ refetchInterval },
);
const provider =
frontendModelProviders[variant.modelProvider as keyof typeof frontendModelProviders];
type OutputSchema = Parameters<typeof provider.normalizeOutput>[0];
const { mutateAsync: hardRefetchMutate } = api.scenarioVariantCells.hardRefetch.useMutation();
const [hardRefetch, hardRefetching] = useHandledAsyncCallback(async () => {
await hardRefetchMutate({ scenarioId: scenario.id, variantId: variant.id });
await utils.scenarioVariantCells.get.invalidate({
scenarioId: scenario.id,
variantId: variant.id,
});
await utils.promptVariants.stats.invalidate({
variantId: variant.id,
});
}, [hardRefetchMutate, scenario.id, variant.id]);
const fetchingOutput = queryLoading || hardRefetching;
const awaitingOutput =
!cell ||
!cell.evalsComplete ||
cell.retrievalStatus === "PENDING" ||
cell.retrievalStatus === "IN_PROGRESS" ||
hardRefetching;
useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : false), [awaitingOutput]);
// TODO: disconnect from socket if we're not streaming anymore
const streamedMessage = useSocket<OutputSchema>(cell?.id);
const mostRecentResponse = cell?.modelResponses[cell.modelResponses.length - 1];
const wrapperProps: Parameters<typeof CellWrapper>[0] = {
cell,
hardRefetching,
hardRefetch,
mostRecentResponse,
scenario,
};
if (!vars) return null;
if (!cell && !fetchingOutput)
return (
<CellWrapper {...wrapperProps}>
<Text color="gray.500">Error retrieving output</Text>
</CellWrapper>
);
if (cell && cell.errorMessage) {
return (
<CellWrapper {...wrapperProps}>
<Text color="red.500">{cell.errorMessage}</Text>
</CellWrapper>
);
}
if (disabledReason) return <Text color="gray.500">{disabledReason}</Text>;
const showLogs = !streamedMessage && !mostRecentResponse?.respPayload;
if (showLogs)
return (
<CellWrapper
{...wrapperProps}
alignItems="flex-start"
fontFamily="inconsolata, monospace"
spacing={0}
>
{cell?.jobQueuedAt && <ResponseLog time={cell.jobQueuedAt} title="Job queued" />}
{cell?.jobStartedAt && <ResponseLog time={cell.jobStartedAt} title="Job started" />}
{cell?.modelResponses?.map((response) => {
let numWaitingMessages = 0;
const relativeWaitingTime = response.receivedAt
? response.receivedAt.getTime()
: Date.now();
if (response.requestedAt) {
numWaitingMessages = Math.min(
Math.floor(
(relativeWaitingTime - response.requestedAt.getTime()) / WAITING_MESSAGE_INTERVAL,
),
// Don't try to render more than 15, it'll use too much CPU and
// break the page
15,
);
}
return (
<Fragment key={response.id}>
{response.requestedAt && (
<ResponseLog time={response.requestedAt} title="Request sent to API" />
)}
{response.requestedAt &&
Array.from({ length: numWaitingMessages }, (_, i) => (
<ResponseLog
key={`waiting-${i}`}
time={
new Date(
(response.requestedAt?.getTime?.() ?? 0) +
(i + 1) * WAITING_MESSAGE_INTERVAL,
)
}
title="Waiting for response..."
/>
))}
{response.receivedAt && (
<ResponseLog
time={response.receivedAt}
title="Response received from API"
message={[
response.statusCode ? `Status: ${response.statusCode}\n` : "",
response.errorMessage ?? "",
].join("")}
/>
)}
</Fragment>
);
}) ?? null}
{mostRecentResponse?.retryTime && (
<RetryCountdown retryTime={mostRecentResponse.retryTime} />
)}
</CellWrapper>
);
const normalizedOutput = mostRecentResponse?.respPayload
? provider.normalizeOutput(mostRecentResponse?.respPayload)
: streamedMessage
? provider.normalizeOutput(streamedMessage)
: null;
if (mostRecentResponse?.respPayload && normalizedOutput?.type === "json") {
return (
<CellWrapper {...wrapperProps}>
<SyntaxHighlighter
customStyle={{ overflowX: "unset", width: "100%", flex: 1 }}
language="json"
style={docco}
lineProps={{
style: { wordBreak: "break-all", whiteSpace: "pre-wrap" },
}}
wrapLines
>
{stringify(normalizedOutput.value, { maxLength: 40 })}
</SyntaxHighlighter>
</CellWrapper>
);
}
const contentToDisplay = (normalizedOutput?.type === "text" && normalizedOutput.value) || "";
return (
<CellWrapper {...wrapperProps}>
<Text whiteSpace="pre-wrap">{contentToDisplay}</Text>
</CellWrapper>
);
}

View File

@@ -1,109 +0,0 @@
import {
Modal,
ModalBody,
ModalCloseButton,
ModalContent,
ModalHeader,
ModalOverlay,
VStack,
Text,
Box,
type UseDisclosureReturn,
Link,
} from "@chakra-ui/react";
import { api, type RouterOutputs } from "~/utils/api";
import { JSONTree } from "react-json-tree";
import CopiableCode from "~/components/CopiableCode";
const theme = {
scheme: "chalk",
author: "chris kempson (http://chriskempson.com)",
base00: "transparent",
base01: "#202020",
base02: "#303030",
base03: "#505050",
base04: "#b0b0b0",
base05: "#d0d0d0",
base06: "#e0e0e0",
base07: "#f5f5f5",
base08: "#fb9fb1",
base09: "#eda987",
base0A: "#ddb26f",
base0B: "#acc267",
base0C: "#12cfc0",
base0D: "#6fc2ef",
base0E: "#e1a3ee",
base0F: "#deaf8f",
};
export default function PromptModal(props: {
cell: NonNullable<RouterOutputs["scenarioVariantCells"]["get"]>;
disclosure: UseDisclosureReturn;
}) {
const { data } = api.scenarioVariantCells.getTemplatedPromptMessage.useQuery(
{
cellId: props.cell.id,
},
{
enabled: props.disclosure.isOpen,
},
);
return (
<Modal isOpen={props.disclosure.isOpen} onClose={props.disclosure.onClose} size="xl">
<ModalOverlay />
<ModalContent>
<ModalHeader>Prompt Details</ModalHeader>
<ModalCloseButton />
<ModalBody>
<VStack py={4} w="">
<VStack w="full" alignItems="flex-start">
<Text fontWeight="bold">Full Prompt</Text>
<Box
w="full"
p={4}
alignItems="flex-start"
backgroundColor="blackAlpha.800"
borderRadius={4}
>
<JSONTree
data={props.cell.prompt}
theme={theme}
shouldExpandNodeInitially={() => true}
getItemString={() => ""}
hideRoot
/>
</Box>
</VStack>
{data?.templatedPrompt && (
<VStack w="full" mt={4} alignItems="flex-start">
<Text fontWeight="bold">Templated prompt message:</Text>
<CopiableCode
w="full"
// bgColor="gray.100"
p={4}
borderWidth={1}
whiteSpace="pre-wrap"
code={data.templatedPrompt}
/>
</VStack>
)}
{data?.learnMoreUrl && (
<Link
href={data.learnMoreUrl}
isExternal
color="blue.500"
fontWeight="bold"
fontSize="sm"
mt={4}
alignSelf="flex-end"
>
Learn More
</Link>
)}
</VStack>
</ModalBody>
</ModalContent>
</Modal>
);
}

View File

@@ -1,22 +0,0 @@
import { HStack, VStack, Text } from "@chakra-ui/react";
import dayjs from "dayjs";
export const ResponseLog = ({
time,
title,
message,
}: {
time: Date;
title: string;
message?: string;
}) => {
return (
<VStack spacing={0} alignItems="flex-start">
<HStack>
<Text>{dayjs(time).format("HH:mm:ss")}</Text>
<Text>{title}</Text>
</HStack>
{message && <Text pl={4}>{message}</Text>}
</VStack>
);
};

View File

@@ -1,211 +0,0 @@
import { isEqual } from "lodash-es";
import { useEffect, useState, type DragEvent } from "react";
import { api } from "~/utils/api";
import { useExperimentAccess, useHandledAsyncCallback, useScenarioVars } from "~/utils/hooks";
import { type Scenario } from "./types";
import {
Box,
Button,
HStack,
Icon,
IconButton,
Spinner,
Text,
Tooltip,
VStack,
} from "@chakra-ui/react";
import { BsArrowsAngleExpand, BsX } from "react-icons/bs";
import { cellPadding } from "./constants";
import { FloatingLabelInput } from "./FloatingLabelInput";
import { ScenarioEditorModal } from "./ScenarioEditorModal";
export default function ScenarioEditor({
scenario,
...props
}: {
scenario: Scenario;
hovered: boolean;
canHide: boolean;
}) {
const { canModify } = useExperimentAccess();
const savedValues = scenario.variableValues as Record<string, string>;
const utils = api.useContext();
const [isDragTarget, setIsDragTarget] = useState(false);
const [variableInputHovered, setVariableInputHovered] = useState(false);
const [values, setValues] = useState<Record<string, string>>(savedValues);
useEffect(() => {
if (savedValues) setValues(savedValues);
}, [savedValues]);
const vars = useScenarioVars();
const variableLabels = vars.data?.map((v) => v.label) ?? [];
const hasChanged = !isEqual(savedValues, values);
const mutation = api.scenarios.replaceWithValues.useMutation();
const [onSave] = useHandledAsyncCallback(async () => {
await mutation.mutateAsync({
id: scenario.id,
values,
});
await utils.scenarios.list.invalidate();
}, [mutation, values]);
const hideMutation = api.scenarios.hide.useMutation();
const [onHide, hidingInProgress] = useHandledAsyncCallback(async () => {
await hideMutation.mutateAsync({
id: scenario.id,
});
await utils.scenarios.list.invalidate();
await utils.promptVariants.stats.invalidate();
}, [hideMutation, scenario.id]);
const reorderMutation = api.scenarios.reorder.useMutation();
const [onReorder] = useHandledAsyncCallback(
async (e: DragEvent<HTMLDivElement>) => {
e.preventDefault();
setIsDragTarget(false);
const draggedId = e.dataTransfer.getData("text/plain");
const droppedId = scenario.id;
if (!draggedId || !droppedId || draggedId === droppedId) return;
await reorderMutation.mutateAsync({
draggedId,
droppedId,
});
await utils.scenarios.list.invalidate();
},
[reorderMutation, scenario.id],
);
const [scenarioEditorModalOpen, setScenarioEditorModalOpen] = useState(false);
return (
<>
<HStack
alignItems="flex-start"
px={cellPadding.x}
py={cellPadding.y}
spacing={0}
height="100%"
draggable={!variableInputHovered}
onDragStart={(e) => {
e.dataTransfer.setData("text/plain", scenario.id);
e.currentTarget.style.opacity = "0.4";
}}
onDragEnd={(e) => {
e.currentTarget.style.opacity = "1";
}}
onDragOver={(e) => {
e.preventDefault();
setIsDragTarget(true);
}}
onDragLeave={() => {
setIsDragTarget(false);
}}
onDrop={onReorder}
backgroundColor={isDragTarget ? "gray.100" : "transparent"}
>
{
<VStack spacing={4} flex={1} py={2}>
<HStack justifyContent="space-between" w="100%" align="center" spacing={0}>
<Text flex={1}>Scenario</Text>
{variableLabels.length > 0 && (
<Tooltip label="Expand" hasArrow>
<IconButton
aria-label="Expand"
icon={<Icon as={BsArrowsAngleExpand} boxSize={3} />}
onClick={() => setScenarioEditorModalOpen(true)}
size="xs"
colorScheme="gray"
color="gray.500"
variant="ghost"
/>
</Tooltip>
)}
{canModify && props.canHide && (
<Tooltip label="Delete" hasArrow>
<IconButton
aria-label="Delete"
icon={
<Icon
as={hidingInProgress ? Spinner : BsX}
boxSize={hidingInProgress ? 4 : 6}
/>
}
onClick={onHide}
size="xs"
display="flex"
colorScheme="gray"
color="gray.500"
variant="ghost"
/>
</Tooltip>
)}
</HStack>
{variableLabels.length === 0 ? (
<Box color="gray.500">
{vars.data ? "No scenario variables configured" : "Loading..."}
</Box>
) : (
variableLabels.map((key) => {
const value = values[key] ?? "";
return (
<FloatingLabelInput
key={key}
label={key}
isDisabled={!canModify}
style={{ width: "100%" }}
maxHeight={32}
value={value}
onChange={(e) => {
setValues((prev) => ({ ...prev, [key]: e.target.value }));
}}
onKeyDown={(e) => {
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
e.preventDefault();
e.currentTarget.blur();
onSave();
}
}}
onMouseEnter={() => setVariableInputHovered(true)}
onMouseLeave={() => setVariableInputHovered(false)}
/>
);
})
)}
{hasChanged && (
<HStack justify="right">
<Button
size="sm"
onMouseDown={() => {
setValues(savedValues);
}}
colorScheme="gray"
>
Reset
</Button>
<Button size="sm" onMouseDown={onSave} colorScheme="blue">
Save
</Button>
</HStack>
)}
</VStack>
}
</HStack>
{scenarioEditorModalOpen && (
<ScenarioEditorModal
scenarioId={scenario.id}
initialValues={savedValues}
onClose={() => setScenarioEditorModalOpen(false)}
/>
)}
</>
);
}

View File

@@ -1,123 +0,0 @@
import {
Button,
HStack,
Modal,
ModalBody,
ModalCloseButton,
ModalContent,
ModalFooter,
ModalHeader,
ModalOverlay,
Spinner,
Text,
VStack,
} from "@chakra-ui/react";
import { useEffect, useState } from "react";
import { isEqual } from "lodash-es";
import { api } from "~/utils/api";
import {
useScenario,
useHandledAsyncCallback,
useExperiment,
useExperimentAccess,
} from "~/utils/hooks";
import { FloatingLabelInput } from "./FloatingLabelInput";
export const ScenarioEditorModal = ({
scenarioId,
initialValues,
onClose,
}: {
scenarioId: string;
initialValues: Record<string, string>;
onClose: () => void;
}) => {
const utils = api.useContext();
const experiment = useExperiment();
const { canModify } = useExperimentAccess();
const scenario = useScenario(scenarioId);
const savedValues = scenario.data?.variableValues as Record<string, string>;
const [values, setValues] = useState<Record<string, string>>(initialValues);
useEffect(() => {
if (savedValues) setValues(savedValues);
}, [savedValues]);
const hasChanged = !isEqual(savedValues, values);
const mutation = api.scenarios.replaceWithValues.useMutation();
const [onSave, saving] = useHandledAsyncCallback(async () => {
await mutation.mutateAsync({
id: scenarioId,
values,
});
await utils.scenarios.list.invalidate();
}, [mutation, values]);
const vars = api.scenarioVars.list.useQuery({ experimentId: experiment.data?.id ?? "" });
const variableLabels = vars.data?.map((v) => v.label) ?? [];
return (
<Modal
isOpen
onClose={onClose}
size={{ base: "xl", sm: "2xl", md: "3xl", lg: "4xl", xl: "5xl" }}
>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader>Edit Scenario</ModalHeader>
<ModalCloseButton />
<ModalBody maxW="unset">
<VStack spacing={8}>
{values &&
variableLabels.map((key) => {
const value = values[key] ?? "";
return (
<FloatingLabelInput
key={key}
label={key}
isDisabled={!canModify}
_disabled={{ opacity: 1 }}
style={{ width: "100%" }}
value={value}
onChange={(e) => {
setValues((prev) => ({ ...prev, [key]: e.target.value }));
}}
onKeyDown={(e) => {
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
e.preventDefault();
e.currentTarget.blur();
onSave();
}
}}
/>
);
})}
</VStack>
</ModalBody>
<ModalFooter>
{canModify && (
<HStack>
<Button
colorScheme="gray"
onClick={() => setValues(savedValues)}
minW={24}
isDisabled={!hasChanged}
>
<Text>Reset</Text>
</Button>
<Button colorScheme="blue" onClick={onSave} minW={24} isDisabled={!hasChanged}>
{saving ? <Spinner boxSize={4} /> : <Text>Save</Text>}
</Button>
</HStack>
)}
</ModalFooter>
</ModalContent>
</Modal>
);
};

View File

@@ -1,16 +0,0 @@
import { type StackProps } from "@chakra-ui/react";
import { useScenarios } from "~/utils/hooks";
import Paginator from "../Paginator";
const ScenarioPaginator = (props: StackProps) => {
const { data } = useScenarios();
if (!data) return null;
const { count } = data;
return <Paginator count={count} condense {...props} />;
};
export default ScenarioPaginator;

View File

@@ -1,6 +0,0 @@
import { type GridItemProps } from "@chakra-ui/react";
export const borders: GridItemProps = {
borderRightWidth: 1,
borderBottomWidth: 1,
};

View File

@@ -1,34 +0,0 @@
import { useState, useEffect } from "react";
const useScrolledPast = (scrollThreshold: number) => {
const [hasScrolledPast, setHasScrolledPast] = useState(true);
useEffect(() => {
const container = document.getElementById("output-container");
if (!container) {
console.warn('Element with id "output-container" not found.');
return;
}
const checkScroll = () => {
const { scrollTop } = container;
// Flag whether scrollTop has exceeded scrollThreshold
setHasScrolledPast(scrollTop > scrollThreshold);
};
checkScroll();
container.addEventListener("scroll", checkScroll);
// Cleanup
return () => {
container.removeEventListener("scroll", checkScroll);
};
}, [scrollThreshold]);
return hasScrolledPast;
};
export default useScrolledPast;
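A usage sketch for the hook; the markup and import path are illustrative and assume an element with id "output-container" is rendered elsewhere on the page.
import { Button } from "@chakra-ui/react";
import useScrolledPast from "./useScrolledPast";
const ScrollToTopButton = () => {
  // Show the button once the outputs container has scrolled 400px.
  const hasScrolledPast = useScrolledPast(400);
  if (!hasScrolledPast) return null;
  return (
    <Button
      onClick={() =>
        document.getElementById("output-container")?.scrollTo({ top: 0, behavior: "smooth" })
      }
    >
      Back to top
    </Button>
  );
};
export default ScrollToTopButton;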

View File

@@ -1,128 +0,0 @@
import {
HStack,
IconButton,
Text,
Select,
type StackProps,
Icon,
useBreakpointValue,
} from "@chakra-ui/react";
import React, { useCallback } from "react";
import { FiChevronsLeft, FiChevronsRight, FiChevronLeft, FiChevronRight } from "react-icons/fi";
import { usePageParams } from "~/utils/hooks";
const pageSizeOptions = [10, 25, 50, 100];
const Paginator = ({ count, ...props }: { count: number; condense?: boolean } & StackProps) => {
const { page, pageSize, setPageParams } = usePageParams();
const lastPage = Math.ceil(count / pageSize);
const updatePageSize = useCallback(
(newPageSize: number) => {
const newPage = Math.floor(((page - 1) * pageSize) / newPageSize) + 1;
setPageParams({ page: newPage, pageSize: newPageSize }, "replace");
},
[page, pageSize, setPageParams],
);
const nextPage = () => {
if (page < lastPage) {
setPageParams({ page: page + 1 }, "replace");
}
};
const prevPage = () => {
if (page > 1) {
setPageParams({ page: page - 1 }, "replace");
}
};
const goToLastPage = () => setPageParams({ page: lastPage }, "replace");
const goToFirstPage = () => setPageParams({ page: 1 }, "replace");
const isMobile = useBreakpointValue({ base: true, md: false });
const condense = isMobile || props.condense;
if (count === 0) return null;
return (
<HStack
pt={4}
spacing={8}
justifyContent={condense ? "flex-start" : "space-between"}
alignItems="center"
w="full"
{...props}
>
{!condense && (
<>
<HStack>
<Text>Rows</Text>
<Select
value={pageSize}
onChange={(e) => updatePageSize(parseInt(e.target.value))}
w={20}
backgroundColor="white"
>
{pageSizeOptions.map((option) => (
<option key={option} value={option}>
{option}
</option>
))}
</Select>
</HStack>
<Text>
Page {page} of {lastPage}
</Text>
</>
)}
<HStack>
<IconButton
variant="outline"
size="sm"
onClick={goToFirstPage}
isDisabled={page === 1}
aria-label="Go to first page"
icon={<Icon as={FiChevronsLeft} boxSize={5} strokeWidth={1.5} />}
bgColor="white"
/>
<IconButton
variant="outline"
size="sm"
onClick={prevPage}
isDisabled={page === 1}
aria-label="Previous page"
icon={<Icon as={FiChevronLeft} boxSize={5} strokeWidth={1.5} />}
bgColor="white"
/>
{condense && (
<Text>
Page {page} of {lastPage}
</Text>
)}
<IconButton
variant="outline"
size="sm"
onClick={nextPage}
isDisabled={page === lastPage}
aria-label="Next page"
icon={<Icon as={FiChevronRight} boxSize={5} strokeWidth={1.5} />}
bgColor="white"
/>
<IconButton
variant="outline"
size="sm"
onClick={goToLastPage}
isDisabled={page === lastPage}
aria-label="Go to last page"
icon={<Icon as={FiChevronsRight} boxSize={5} strokeWidth={1.5} />}
bgColor="white"
/>
</HStack>
</HStack>
);
};
export default Paginator;
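A usage sketch for Paginator; the count value and import path are placeholders, and page state comes from the URL via usePageParams.
import Paginator from "./Paginator";
const LogsFooter = ({ totalRows }: { totalRows: number }) => (
  <Paginator count={totalRows} condense px={4} />
);
export default LogsFooter;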

View File

@@ -1,51 +0,0 @@
import { Card, CardHeader, Heading, Table, Tbody, HStack, Button, Text } from "@chakra-ui/react";
import { useState } from "react";
import Link from "next/link";
import { useLoggedCalls } from "~/utils/hooks";
import { EmptyTableRow, TableHeader, TableRow } from "../requestLogs/TableRow";
export default function LoggedCallsTable() {
const { data: loggedCalls } = useLoggedCalls(false);
const [expandedRow, setExpandedRow] = useState<string | null>(null);
return (
<Card width="100%" overflow="hidden">
<CardHeader>
<HStack justifyContent="space-between">
<Heading as="h3" size="sm">
Request Logs
</Heading>
<Button as={Link} href="/request-logs" variant="ghost" colorScheme="blue">
<Text>View All</Text>
</Button>
</HStack>
</CardHeader>
<Table>
<TableHeader />
<Tbody>
{loggedCalls?.calls.length ? (
loggedCalls?.calls.map((loggedCall) => {
return (
<TableRow
key={loggedCall.id}
loggedCall={loggedCall}
isExpanded={loggedCall.id === expandedRow}
onToggle={() => {
if (loggedCall.id === expandedRow) {
setExpandedRow(null);
} else {
setExpandedRow(loggedCall.id);
}
}}
/>
);
})
) : (
<EmptyTableRow filtersApplied={false} />
)}
</Tbody>
</Table>
</Card>
);
}

View File

@@ -1,61 +0,0 @@
import {
ResponsiveContainer,
LineChart,
Line,
XAxis,
YAxis,
CartesianGrid,
Tooltip,
Legend,
} from "recharts";
import { useMemo } from "react";
import { useSelectedProject } from "~/utils/hooks";
import dayjs from "~/utils/dayjs";
import { api } from "~/utils/api";
export default function UsageGraph() {
const { data: selectedProject } = useSelectedProject();
const stats = api.dashboard.stats.useQuery(
{ projectId: selectedProject?.id ?? "" },
{ enabled: !!selectedProject },
);
const data = useMemo(() => {
return (
stats.data?.periods.map(({ period, numQueries, cost }) => ({
period,
Requests: numQueries,
"Total Spent (USD)": parseFloat(cost.toString()),
})) || []
);
}, [stats.data]);
return (
<ResponsiveContainer width="100%" height={400}>
<LineChart data={data} margin={{ top: 5, right: 20, left: 10, bottom: 5 }}>
<XAxis dataKey="period" tickFormatter={(str: string) => dayjs(str).format("MMM D")} />
<YAxis yAxisId="left" dataKey="Requests" orientation="left" stroke="#8884d8" />
<YAxis
yAxisId="right"
dataKey="Total Spent (USD)"
orientation="right"
unit="$"
stroke="#82ca9d"
/>
<Tooltip />
<Legend />
<CartesianGrid stroke="#f5f5f5" />
<Line dataKey="Requests" stroke="#8884d8" yAxisId="left" dot={false} strokeWidth={2} />
<Line
dataKey="Total Spent (USD)"
stroke="#82ca9d"
yAxisId="right"
dot={false}
strokeWidth={2}
/>
</LineChart>
</ResponsiveContainer>
);
}

View File

@@ -1,37 +0,0 @@
import {
Drawer,
DrawerBody,
DrawerCloseButton,
DrawerContent,
DrawerHeader,
DrawerOverlay,
Heading,
VStack,
type UseDisclosureReturn,
} from "@chakra-ui/react";
import { DeleteButton } from "./DeleteButton";
export default function DatasetConfigurationDrawer({
disclosure,
}: {
disclosure: UseDisclosureReturn;
}) {
return (
<Drawer placement="right" size="md" {...disclosure}>
<DrawerOverlay />
<DrawerContent>
<DrawerCloseButton />
<DrawerHeader>
<Heading size="md">Dataset Configuration</Heading>
</DrawerHeader>
<DrawerBody h="full" pb={4}>
<VStack h="full" justifyContent="space-between">
<VStack spacing={6}></VStack>
<DeleteButton closeDrawer={disclosure.onClose} />
</VStack>
</DrawerBody>
</DrawerContent>
</Drawer>
);
}

View File

@@ -1,39 +0,0 @@
import { Button, Icon, useDisclosure, Text } from "@chakra-ui/react";
import { useRouter } from "next/router";
import { BsTrash } from "react-icons/bs";
import { useHandledAsyncCallback, useDataset } from "~/utils/hooks";
import DeleteDatasetDialog from "./DeleteDatasetDialog";
export const DeleteButton = ({ closeDrawer }: { closeDrawer: () => void }) => {
const dataset = useDataset();
const router = useRouter();
const disclosure = useDisclosure();
const [onDelete] = useHandledAsyncCallback(async () => {
await router.push({ pathname: "/datasets" });
closeDrawer();
}, [router, closeDrawer]);
return (
<>
<Button
size="sm"
variant="ghost"
colorScheme="red"
fontWeight="normal"
onClick={disclosure.onOpen}
>
<Icon as={BsTrash} boxSize={4} />
<Text ml={2}>Delete Dataset</Text>
</Button>
<DeleteDatasetDialog
datasetId={dataset.data?.id}
onDelete={onDelete}
disclosure={disclosure}
/>
</>
);
};

View File

@@ -1,73 +0,0 @@
import { useRef } from "react";
import {
type UseDisclosureReturn,
AlertDialog,
AlertDialogOverlay,
AlertDialogContent,
AlertDialogHeader,
AlertDialogBody,
AlertDialogFooter,
Button,
} from "@chakra-ui/react";
import { api } from "~/utils/api";
import { useHandledAsyncCallback } from "~/utils/hooks";
const DeleteDatasetDialog = ({
datasetId,
onDelete,
disclosure,
}: {
datasetId?: string;
onDelete?: () => void;
disclosure: UseDisclosureReturn;
}) => {
const cancelRef = useRef<HTMLButtonElement>(null);
const mutation = api.datasets.delete.useMutation();
const utils = api.useContext();
const [onDeleteConfirm, deletionInProgress] = useHandledAsyncCallback(async () => {
if (!datasetId) return;
await mutation.mutateAsync({ id: datasetId });
await utils.datasets.list.invalidate();
onDelete?.();
disclosure.onClose();
}, [mutation, datasetId, disclosure.onClose]);
return (
<AlertDialog leastDestructiveRef={cancelRef} {...disclosure}>
<AlertDialogOverlay>
<AlertDialogContent>
<AlertDialogHeader fontSize="lg" fontWeight="bold">
Delete Dataset
</AlertDialogHeader>
<AlertDialogBody>
If you delete this dataset, all the associated dataset entries will be deleted as well.
Are you sure?
</AlertDialogBody>
<AlertDialogFooter>
<Button ref={cancelRef} onClick={disclosure.onClose}>
Cancel
</Button>
<Button
colorScheme="red"
isLoading={deletionInProgress}
onClick={onDeleteConfirm}
ml={3}
>
Delete
</Button>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialogOverlay>
</AlertDialog>
);
};
export default DeleteDatasetDialog;

View File

@@ -1,46 +0,0 @@
import { Card, Table, Tbody } from "@chakra-ui/react";
import { useState } from "react";
import { useDatasetEntries } from "~/utils/hooks";
import { TableHeader, TableRow, EmptyTableRow } from "./TableRow";
import DatasetEntryEditorDrawer from "./DatasetEntryEditorDrawer";
export default function DatasetEntriesTable() {
const [expandedDatasetEntryId, setExpandedDatasetEntryId] = useState<string | null>(null);
const datasetEntries = useDatasetEntries().data?.entries;
return (
<>
<Card width="100%" overflowX="auto">
<Table>
<TableHeader />
<Tbody>
{datasetEntries?.length ? (
datasetEntries?.map((entry) => {
return (
<TableRow
key={entry.id}
datasetEntry={entry}
onToggle={() => {
if (entry.id === expandedDatasetEntryId) {
setExpandedDatasetEntryId(null);
} else {
setExpandedDatasetEntryId(entry.id);
}
}}
showOptions
/>
);
})
) : (
<EmptyTableRow />
)}
</Tbody>
</Table>
</Card>
<DatasetEntryEditorDrawer
datasetEntryId={expandedDatasetEntryId}
clearDatasetEntryId={() => setExpandedDatasetEntryId(null)}
/>
</>
);
}

View File

@@ -1,174 +0,0 @@
import { Fragment, useState, useEffect, useMemo } from "react";
import {
Drawer,
DrawerBody,
DrawerCloseButton,
DrawerContent,
DrawerHeader,
DrawerOverlay,
DrawerFooter,
Heading,
VStack,
HStack,
Button,
Text,
Divider,
Icon,
} from "@chakra-ui/react";
import { type CreateChatCompletionRequestMessage } from "openai/resources/chat";
import { BsPlus } from "react-icons/bs";
import { type DatasetEntryType } from "@prisma/client";
import { api } from "~/utils/api";
import { useDatasetEntry, useHandledAsyncCallback } from "~/utils/hooks";
import EditableMessage from "./EditableMessage";
import EntryTypeDropdown from "./EntryTypeDropdown";
export default function DatasetEntryEditorDrawer({
datasetEntryId,
clearDatasetEntryId,
}: {
datasetEntryId: string | null;
clearDatasetEntryId: () => void;
}) {
const utils = api.useContext();
const datasetEntry = useDatasetEntry(datasetEntryId).data;
const savedInputMessages = useMemo(
() => datasetEntry?.input as unknown as CreateChatCompletionRequestMessage[],
[datasetEntry],
);
const savedOutputMessage = useMemo(
() => datasetEntry?.output as unknown as CreateChatCompletionRequestMessage,
[datasetEntry],
);
const [inputMessagesToSave, setInputMessagesToSave] = useState<
CreateChatCompletionRequestMessage[]
>([]);
const [outputMessageToSave, setOutputMessageToSave] =
useState<CreateChatCompletionRequestMessage | null>(null);
useEffect(() => {
if (savedInputMessages) {
setInputMessagesToSave(savedInputMessages);
setOutputMessageToSave(savedOutputMessage);
}
}, [savedInputMessages, savedOutputMessage]);
const updateMutation = api.datasetEntries.update.useMutation();
const [onSave, savingInProgress] = useHandledAsyncCallback(async () => {
if (!datasetEntryId || !inputMessagesToSave) return;
await updateMutation.mutateAsync({
id: datasetEntryId,
updates: {
input: JSON.stringify(inputMessagesToSave),
output: JSON.stringify(outputMessageToSave),
},
});
await utils.datasetEntries.list.invalidate();
await utils.datasetEntries.get.invalidate({ id: datasetEntryId });
}, [updateMutation, datasetEntryId, inputMessagesToSave, outputMessageToSave, utils]);
const [onUpdateType] = useHandledAsyncCallback(
async (type: DatasetEntryType) => {
if (!datasetEntryId) return;
await updateMutation.mutateAsync({
id: datasetEntryId,
updates: {
type,
},
});
await utils.datasetEntries.list.invalidate();
await utils.datasetEntries.get.invalidate({ id: datasetEntryId });
},
[updateMutation, datasetEntryId, utils],
);
return (
<Drawer isOpen={!!datasetEntryId} onClose={clearDatasetEntryId} placement="right" size="md">
<DrawerOverlay />
<DrawerContent>
<DrawerCloseButton pt={6} />
<DrawerHeader bgColor="orange.50">
<HStack w="full" justifyContent="space-between" pr={8}>
<Heading size="md">Dataset Entry</Heading>
{datasetEntry && (
<EntryTypeDropdown type={datasetEntry.type} onTypeChange={onUpdateType} />
)}
</HStack>
</DrawerHeader>
<DrawerBody h="full" pb={4} bgColor="orange.50">
<VStack h="full" justifyContent="space-between">
<VStack w="full" spacing={12} py={4}>
<VStack w="full" alignItems="flex-start">
<Text fontWeight="bold">Input</Text>
{inputMessagesToSave.map((message, i) => {
return (
<Fragment key={i}>
<Divider my={4} />
<EditableMessage
message={message}
onEdit={(message) => {
const newInputMessages = [...inputMessagesToSave];
newInputMessages[i] = message;
setInputMessagesToSave(newInputMessages);
}}
onDelete={() => {
const newInputMessages = [...inputMessagesToSave];
newInputMessages.splice(i, 1);
setInputMessagesToSave(newInputMessages);
}}
/>
</Fragment>
);
})}
<Divider my={4} />
<Button
w="full"
onClick={() =>
setInputMessagesToSave([...inputMessagesToSave, { role: "user", content: "" }])
}
variant="outline"
color="gray.500"
_hover={{ bgColor: "orange.100" }}
>
<HStack spacing={0}>
<Text>Add Message</Text>
<Icon as={BsPlus} boxSize={6} />
</HStack>
</Button>
</VStack>
<VStack w="full" alignItems="flex-start">
<Text fontWeight="bold">Output</Text>
<Divider my={4} />
<EditableMessage
message={outputMessageToSave}
onEdit={(message) => setOutputMessageToSave(message)}
isOutput
/>
</VStack>
</VStack>
</VStack>
</DrawerBody>
<DrawerFooter bgColor="orange.50">
<HStack>
<Button
onClick={() => {
setInputMessagesToSave(savedInputMessages);
setOutputMessageToSave(savedOutputMessage);
}}
>
Reset
</Button>
<Button isLoading={savingInProgress} onClick={onSave} colorScheme="orange">
Save
</Button>
</HStack>
</DrawerFooter>
</DrawerContent>
</Drawer>
);
}

View File

@@ -1,105 +0,0 @@
import { VStack, HStack, Tooltip, IconButton, Icon } from "@chakra-ui/react";
import { type CreateChatCompletionRequestMessage } from "openai/resources/chat";
import { BsX } from "react-icons/bs";
import AutoResizeTextArea from "~/components/AutoResizeTextArea";
import InputDropdown from "~/components/InputDropdown";
import { parseableToFunctionCall } from "~/utils/utils";
import FunctionCallEditor from "./FunctionCallEditor";
const MESSAGE_ROLE_OPTIONS = ["system", "user", "assistant", "function"] as const;
const OUTPUT_OPTIONS = ["plaintext", "func_call"] as const;
const EditableMessage = ({
message,
onEdit,
onDelete,
isOutput,
}: {
message: CreateChatCompletionRequestMessage | null;
onEdit: (message: CreateChatCompletionRequestMessage) => void;
onDelete?: () => void;
isOutput?: boolean;
}) => {
const { role = "assistant", content = "", function_call } = message || {};
const currentOutputOption: (typeof OUTPUT_OPTIONS)[number] = function_call
? "func_call"
: "plaintext";
return (
<VStack w="full">
<HStack w="full" justifyContent="space-between">
<HStack>
{!isOutput && (
<InputDropdown
options={MESSAGE_ROLE_OPTIONS}
selectedOption={role}
onSelect={(option) => {
const updatedMessage = { role: option, content };
if (role === "assistant" && currentOutputOption === "func_call") {
updatedMessage.content = JSON.stringify(function_call, null, 2);
}
onEdit(updatedMessage);
}}
inputGroupProps={{ w: "32", bgColor: "white" }}
/>
)}
{role === "assistant" && (
<InputDropdown
options={OUTPUT_OPTIONS}
selectedOption={currentOutputOption}
onSelect={(option) => {
const updatedMessage: CreateChatCompletionRequestMessage = {
role,
content: null,
function_call: undefined,
};
if (option === "plaintext") {
updatedMessage.content = JSON.stringify(function_call, null, 2);
} else if (option === "func_call") {
updatedMessage.function_call =
content && parseableToFunctionCall(content)
? JSON.parse(content)
: { name: "", arguments: "{}" };
}
onEdit(updatedMessage);
}}
inputGroupProps={{ w: "32", bgColor: "white" }}
/>
)}
</HStack>
{!isOutput && (
<HStack>
<Tooltip label="Delete" hasArrow>
<IconButton
aria-label="Delete"
icon={<Icon as={BsX} boxSize={6} />}
onClick={onDelete}
size="xs"
display="flex"
colorScheme="gray"
color="gray.500"
variant="ghost"
/>
</Tooltip>
</HStack>
)}
</HStack>
{function_call ? (
<FunctionCallEditor
function_call={function_call}
onEdit={(function_call) => onEdit({ role, function_call, content: null })}
/>
) : (
<AutoResizeTextArea
value={content || JSON.stringify(function_call, null, 2)}
onChange={(e) => onEdit({ role, content: e.target.value })}
bgColor="white"
/>
)}
</VStack>
);
};
export default EditableMessage;
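For context, a minimal sketch of what parseableToFunctionCall (imported from ~/utils/utils) is assumed to check before content is re-parsed as a function call; the real helper is not shown in this diff and may differ.

// Hypothetical sketch: the string parses as JSON and has the { name, arguments } shape of an
// OpenAI function call. Not the actual implementation.
const parseableToFunctionCallSketch = (content: string): boolean => {
  try {
    const parsed = JSON.parse(content) as { name?: unknown; arguments?: unknown };
    return typeof parsed.name === "string" && typeof parsed.arguments === "string";
  } catch {
    return false;
  }
};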

View File

@@ -1,24 +0,0 @@
import { type DatasetEntryType } from "@prisma/client";
import InputDropdown from "~/components/InputDropdown";
const ENTRY_TYPE_OPTIONS: DatasetEntryType[] = ["TRAIN", "TEST"];
const EntryTypeDropdown = ({
type,
onTypeChange,
}: {
type: DatasetEntryType;
onTypeChange: (type: DatasetEntryType) => void;
}) => {
return (
<InputDropdown
options={ENTRY_TYPE_OPTIONS}
selectedOption={type}
onSelect={onTypeChange}
inputGroupProps={{ w: "32", bgColor: "white" }}
/>
);
};
export default EntryTypeDropdown;

View File

@@ -1,125 +0,0 @@
import { useRef, useMemo, useEffect } from "react";
import { VStack, HStack, Text, Input, Box } from "@chakra-ui/react";
import { type CreateChatCompletionRequestMessage } from "openai/resources/chat";
import { useAppStore } from "~/state/store";
import { type CreatedEditor } from "~/state/sharedVariantEditor.slice";
const FunctionCallEditor = ({
function_call,
onEdit,
}: {
function_call: CreateChatCompletionRequestMessage.FunctionCall;
onEdit: (function_call: CreateChatCompletionRequestMessage.FunctionCall) => void;
}) => {
const monaco = useAppStore.use.sharedArgumentsEditor.monaco();
const editorRef = useRef<CreatedEditor | null>(null);
const editorId = useMemo(() => `editor_${Math.random().toString(36).substring(7)}`, []);
useEffect(() => {
if (monaco) {
const container = document.getElementById(editorId) as HTMLElement;
const editor = monaco.editor.create(container, {
value: function_call.arguments,
language: "json",
theme: "customTheme",
lineNumbers: "off",
minimap: { enabled: false },
wrappingIndent: "indent",
wrappingStrategy: "advanced",
wordWrap: "on",
folding: false,
scrollbar: {
alwaysConsumeMouseWheel: false,
verticalScrollbarSize: 0,
},
wordWrapBreakAfterCharacters: "",
wordWrapBreakBeforeCharacters: "",
quickSuggestions: true,
renderLineHighlight: "none",
fontSize: 14,
scrollBeyondLastLine: false,
});
editorRef.current = editor;
const updateHeight = () => {
const contentHeight = editor.getContentHeight();
container.style.height = `${contentHeight}px`;
editor.layout();
};
const attemptDocumentFormat = () => {
const action = editor.getAction("editor.action.formatDocument");
if (action) {
action
.run()
.then(updateHeight)
.catch((error) => {
console.error("Error running formatDocument:", error);
});
return true;
}
return false;
};
editor.onDidBlurEditorText(() => {
attemptDocumentFormat();
onEdit({ name: function_call.name, arguments: editor.getValue() });
});
// Interval function to check for action availability
const checkForActionInterval = setInterval(() => {
const formatted = attemptDocumentFormat();
if (formatted) {
clearInterval(checkForActionInterval); // Clear the interval once the action is found and run
}
}, 100); // Check every 100ms
// Add content change listener
const contentChangeListener = editor.onDidChangeModelContent(updateHeight);
const resizeObserver = new ResizeObserver(() => {
editor.layout();
});
resizeObserver.observe(container);
return () => {
contentChangeListener.dispose();
resizeObserver.disconnect();
editor?.dispose();
};
}
}, [monaco, editorId, function_call.name, function_call.arguments, onEdit]);
return (
<VStack w="full" alignItems="flex-start">
<HStack w="full">
<Text fontWeight="bold" w={192}>
Name:
</Text>
<Input
value={function_call.name}
onChange={(e) => onEdit({ name: e.target.value, arguments: function_call.arguments })}
bgColor="white"
/>
</HStack>
<Text fontWeight="bold" w={32}>
Arguments
</Text>
<VStack
borderRadius={4}
border="1px solid"
borderColor="gray.200"
w="full"
py={1}
bgColor="white"
>
<Box id={editorId} w="full" />
</VStack>
</VStack>
);
};
export default FunctionCallEditor;
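The checkForActionInterval block above retries until Monaco registers the formatDocument action. Here is the same retry-until-available idea as a standalone, hypothetical helper rather than the component's actual code:

// Poll an attempt function until it reports success, then stop. Returns a cancel function the
// caller could invoke on unmount. Illustrative helper only.
const pollUntil = (attempt: () => boolean, intervalMs = 100): (() => void) => {
  const timer = setInterval(() => {
    if (attempt()) clearInterval(timer);
  }, intervalMs);
  return () => clearInterval(timer);
};

// e.g. pollUntil(attemptDocumentFormat) would mirror the interval logic in the effect above.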

View File

@@ -1,128 +0,0 @@
import { Box, Td, Tr, Thead, Th, Tooltip, HStack, Text, Checkbox } from "@chakra-ui/react";
import Link from "next/link";
import dayjs from "~/utils/dayjs";
import { type RouterOutputs } from "~/utils/api";
import { useAppStore } from "~/state/store";
import { useIsClientRehydrated, useDatasetEntries } from "~/utils/hooks";
import { useMemo } from "react";
type DatasetEntry = RouterOutputs["datasetEntries"]["list"]["entries"][0];
export const TableHeader = () => {
const matchingDatasetEntryIds = useDatasetEntries().data?.matchingEntryIds;
const selectedDatasetEntryIds = useAppStore((s) => s.selectedDatasetEntries.selectedIds);
const addSelectedIds = useAppStore((s) => s.selectedDatasetEntries.addSelectedIds);
const clearSelectedIds = useAppStore((s) => s.selectedDatasetEntries.clearSelectedIds);
const allSelected = useMemo(() => {
if (!matchingDatasetEntryIds || !matchingDatasetEntryIds.length) return false;
return matchingDatasetEntryIds.every((id) => selectedDatasetEntryIds.has(id));
}, [matchingDatasetEntryIds, selectedDatasetEntryIds]);
const isClientRehydrated = useIsClientRehydrated();
if (!isClientRehydrated) return null;
return (
<Thead>
<Tr>
<Th pr={0}>
<HStack minW={16}>
<Checkbox
isChecked={allSelected}
onChange={() => {
allSelected ? clearSelectedIds() : addSelectedIds(matchingDatasetEntryIds || []);
}}
/>
<Text>
({selectedDatasetEntryIds.size ? `${selectedDatasetEntryIds.size}/` : ""}
{matchingDatasetEntryIds?.length || 0})
</Text>
</HStack>
</Th>
<Th>Created At</Th>
<Th isNumeric>Input tokens</Th>
<Th isNumeric>Output tokens</Th>
<Th isNumeric>Type</Th>
</Tr>
</Thead>
);
};
export const TableRow = ({
datasetEntry,
onToggle,
showOptions,
}: {
datasetEntry: DatasetEntry;
onToggle: () => void;
showOptions?: boolean;
}) => {
const createdAt = dayjs(datasetEntry.createdAt).format("MMMM D h:mm A");
const fullTime = dayjs(datasetEntry.createdAt).toString();
const isChecked = useAppStore((s) => s.selectedDatasetEntries.selectedIds.has(datasetEntry.id));
const toggleChecked = useAppStore((s) => s.selectedDatasetEntries.toggleSelectedId);
const isClientRehydrated = useIsClientRehydrated();
if (!isClientRehydrated) return null;
return (
<Tr
onClick={onToggle}
key={datasetEntry.id}
_hover={{ bgColor: "gray.50", cursor: "pointer" }}
fontSize="sm"
>
{showOptions && (
<Td>
<Checkbox isChecked={isChecked} onChange={() => toggleChecked(datasetEntry.id)} />
</Td>
)}
<Td>
<Tooltip label={fullTime} placement="top">
<Box whiteSpace="nowrap" minW="120px">
{createdAt}
</Box>
</Tooltip>
</Td>
<Td isNumeric>{datasetEntry.inputTokens}</Td>
<Td isNumeric>{datasetEntry.outputTokens}</Td>
<Td isNumeric>{datasetEntry.type}</Td>
</Tr>
);
};
export const EmptyTableRow = ({ filtersApplied = true }: { filtersApplied?: boolean }) => {
const visibleColumns = useAppStore((s) => s.columnVisibility.visibleColumns);
const filters = useAppStore((state) => state.logFilters.filters);
const { isLoading } = useDatasetEntries();
if (isLoading) return null;
if (filters.length && filtersApplied) {
return (
<Tr>
<Td w="full" colSpan={visibleColumns.size + 1}>
<Text color="gray.500" textAlign="center" w="full" p={4}>
No matching entries found. Try removing some filters.
</Text>
</Td>
</Tr>
);
}
return (
<Tr>
<Td w="full" colSpan={visibleColumns.size + 1}>
<Text color="gray.500" textAlign="center" w="full" p={4}>
This dataset has no entries. Add some logs in the{" "}
<Link href="/request-logs">
<Text as="span" color="blue.600">
Request Logs
</Text>
</Link>{" "}
tab.
</Text>
</Td>
</Tr>
);
};

View File

@@ -1,16 +0,0 @@
import { type StackProps } from "@chakra-ui/react";
import { useDatasetEntries } from "~/utils/hooks";
import Paginator from "../Paginator";
const DatasetEntryPaginator = (props: StackProps) => {
const { data } = useDatasetEntries();
if (!data) return null;
const { matchingEntryIds } = data;
return <Paginator count={matchingEntryIds.length} {...props} />;
};
export default DatasetEntryPaginator;

View File

@@ -1,20 +0,0 @@
import { Button, HStack, Icon, Text } from "@chakra-ui/react";
import { useDataset } from "~/utils/hooks";
import { BsGearFill } from "react-icons/bs";
export const DatasetHeaderButtons = ({ openDrawer }: { openDrawer: () => void }) => {
const dataset = useDataset();
if (dataset.isLoading) return null;
return (
<HStack spacing={0} mt={{ base: 2, md: 0 }}>
<Button variant={{ base: "solid", md: "ghost" }} onClick={openDrawer}>
<HStack>
<Icon as={BsGearFill} />
<Text>Configure</Text>
</HStack>
</Button>
</HStack>
);
};

View File

@@ -1,52 +0,0 @@
import { Card, Table, Thead, Tr, Th, Tbody, Td, VStack, Icon, Text } from "@chakra-ui/react";
import { FaTable } from "react-icons/fa";
import Link from "next/link";
import dayjs from "~/utils/dayjs";
import { useDatasets } from "~/utils/hooks";
const DatasetsTable = ({}) => {
const { data } = useDatasets();
const datasets = data || [];
return (
<Card width="100%" overflowX="auto">
{datasets.length ? (
<Table>
<Thead>
<Tr>
<Th>Name</Th>
<Th>Created At</Th>
<Th>Size</Th>
</Tr>
</Thead>
<Tbody>
{datasets.map((dataset) => {
return (
<Tr key={dataset.id}>
<Td>
<Link href={{ pathname: "/datasets/[id]", query: { id: dataset.id } }}>
<Text color="blue.600">{dataset.name}</Text>
</Link>
</Td>
<Td>{dayjs(dataset.createdAt).format("MMMM D h:mm A")}</Td>
<Td>{dataset._count.datasetEntries}</Td>
</Tr>
);
})}
</Tbody>
</Table>
) : (
<VStack py={8}>
<Icon as={FaTable} boxSize={16} color="gray.300" />
<Text color="gray.400" fontSize="lg" fontWeight="bold">
No Datasets Found. Create your first dataset.
</Text>
</VStack>
)}
</Card>
);
};
export default DatasetsTable;

View File

@@ -1,107 +0,0 @@
import {
Modal,
ModalOverlay,
ModalContent,
ModalHeader,
ModalCloseButton,
ModalBody,
ModalFooter,
HStack,
VStack,
Icon,
Text,
Button,
useDisclosure,
type UseDisclosureReturn,
} from "@chakra-ui/react";
import { BsTrash } from "react-icons/bs";
import { useHandledAsyncCallback, useDataset } from "~/utils/hooks";
import { api } from "~/utils/api";
import { useAppStore } from "~/state/store";
import ActionButton from "../ActionButton";
import { maybeReportError } from "~/utils/errorHandling/maybeReportError";
import pluralize from "pluralize";
const DeleteButton = () => {
const selectedIds = useAppStore((s) => s.selectedDatasetEntries.selectedIds);
const disclosure = useDisclosure();
return (
<>
<ActionButton
onClick={disclosure.onOpen}
label="Delete"
icon={BsTrash}
isDisabled={selectedIds.size === 0}
requireBeta
/>
<DeleteDatasetEntriesModal disclosure={disclosure} />
</>
);
};
export default DeleteButton;
const DeleteDatasetEntriesModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
const dataset = useDataset().data;
const selectedIds = useAppStore((s) => s.selectedDatasetEntries.selectedIds);
const clearSelectedIds = useAppStore((s) => s.selectedDatasetEntries.clearSelectedIds);
const deleteRowsMutation = api.datasetEntries.delete.useMutation();
const utils = api.useContext();
const [deleteRows, deletionInProgress] = useHandledAsyncCallback(async () => {
if (!dataset?.id || !selectedIds.size) return;
// divide selectedIds into chunks of 15000 to reduce request size
const chunkSize = 15000;
const idsArray = Array.from(selectedIds);
for (let i = 0; i < idsArray.length; i += chunkSize) {
const response = await deleteRowsMutation.mutateAsync({
ids: idsArray.slice(i, i + chunkSize),
});
if (maybeReportError(response)) return;
}
await utils.datasetEntries.list.invalidate();
disclosure.onClose();
clearSelectedIds();
}, [deleteRowsMutation, dataset, selectedIds, utils]);
return (
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader>
<HStack>
<Icon as={BsTrash} />
<Text>Delete Logs</Text>
</HStack>
</ModalHeader>
<ModalCloseButton />
<ModalBody maxW="unset">
<VStack w="full" spacing={8} pt={4} alignItems="flex-start">
<Text>
Are you sure you want to delete the <b>{selectedIds.size}</b>{" "}
{pluralize("row", selectedIds.size)} you've selected?
</Text>
</VStack>
</ModalBody>
<ModalFooter>
<HStack>
<Button colorScheme="gray" onClick={disclosure.onClose} minW={24}>
Cancel
</Button>
<Button colorScheme="red" onClick={deleteRows} isLoading={deletionInProgress} minW={24}>
Delete
</Button>
</HStack>
</ModalFooter>
</ModalContent>
</Modal>
);
};
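The deleteRows callback above batches ids into groups of 15,000 so each request stays small. A generic sketch of that batching pattern as a hypothetical helper, not part of the codebase:

// Split a large id list into fixed-size chunks; each chunk becomes one delete request.
function chunkIds(ids: string[], size = 15000): string[][] {
  const chunks: string[][] = [];
  for (let i = 0; i < ids.length; i += size) {
    chunks.push(ids.slice(i, i + size));
  }
  return chunks;
}

// e.g. chunkIds(Array.from(selectedIds)).length === Math.ceil(selectedIds.size / 15000)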

View File

@@ -1,182 +0,0 @@
import { useState, useEffect } from "react";
import {
Modal,
ModalOverlay,
ModalContent,
ModalHeader,
ModalCloseButton,
ModalBody,
ModalFooter,
HStack,
VStack,
Icon,
Text,
Button,
Checkbox,
NumberInput,
NumberInputField,
NumberInputStepper,
NumberIncrementStepper,
NumberDecrementStepper,
Collapse,
Flex,
useDisclosure,
type UseDisclosureReturn,
} from "@chakra-ui/react";
import { AiOutlineDownload } from "react-icons/ai";
import { useHandledAsyncCallback, useDataset } from "~/utils/hooks";
import { api } from "~/utils/api";
import { useAppStore } from "~/state/store";
import ActionButton from "../ActionButton";
import { FiChevronUp, FiChevronDown } from "react-icons/fi";
import InfoCircle from "../InfoCircle";
const ExportButton = () => {
const selectedIds = useAppStore((s) => s.selectedDatasetEntries.selectedIds);
const disclosure = useDisclosure();
return (
<>
<ActionButton
onClick={disclosure.onOpen}
label="Download"
icon={AiOutlineDownload}
isDisabled={selectedIds.size === 0}
requireBeta
/>
<ExportDatasetEntriesModal disclosure={disclosure} />
</>
);
};
export default ExportButton;
const ExportDatasetEntriesModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
const dataset = useDataset().data;
const selectedIds = useAppStore((s) => s.selectedDatasetEntries.selectedIds);
const clearSelectedIds = useAppStore((s) => s.selectedDatasetEntries.clearSelectedIds);
const [testingSplit, setTestingSplit] = useState(10);
const [removeDuplicates, setRemoveDuplicates] = useState(false);
const [showAdvancedOptions, setShowAdvancedOptions] = useState(false);
useEffect(() => {
if (disclosure.isOpen) {
setTestingSplit(10);
setRemoveDuplicates(false);
}
}, [disclosure.isOpen]);
const exportDataMutation = api.datasetEntries.export.useMutation();
const [exportData, exportInProgress] = useHandledAsyncCallback(async () => {
if (!dataset?.id || !selectedIds.size || !testingSplit) return;
const response = await exportDataMutation.mutateAsync({
datasetId: dataset.id,
datasetEntryIds: Array.from(selectedIds),
testingSplit,
removeDuplicates,
});
const dataUrl = `data:application/zip;base64,${response}`;
const blob = await fetch(dataUrl).then((res) => res.blob());
const url = URL.createObjectURL(blob);
const a = document.createElement("a");
a.href = url;
a.download = `data.zip`;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
disclosure.onClose();
clearSelectedIds();
}, [exportDataMutation, dataset, selectedIds, testingSplit, removeDuplicates]);
return (
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader>
<HStack>
<Icon as={AiOutlineDownload} />
<Text>Export Logs</Text>
</HStack>
</ModalHeader>
<ModalCloseButton />
<ModalBody maxW="unset">
<VStack w="full" spacing={8} pt={4} alignItems="flex-start">
<Text>
We'll export the <b>{selectedIds.size}</b> rows you have selected in the OpenAI
training format.
</Text>
<VStack alignItems="flex-start" spacing={4}>
<Flex
flexDir={{ base: "column", md: "row" }}
alignItems={{ base: "flex-start", md: "center" }}
>
<HStack w={48} alignItems="center" spacing={1}>
<Text fontWeight="bold">Testing Split:</Text>
<InfoCircle tooltipText="The percent of your logs that will be reserved for testing and saved in another file. Logs are split randomly." />
</HStack>
<HStack>
<NumberInput
defaultValue={10}
onChange={(_, num) => setTestingSplit(num)}
min={1}
max={100}
w={48}
>
<NumberInputField />
<NumberInputStepper>
<NumberIncrementStepper />
<NumberDecrementStepper />
</NumberInputStepper>
</NumberInput>
</HStack>
</Flex>
</VStack>
<VStack alignItems="flex-start" spacing={0}>
<Button
variant="unstyled"
color="blue.600"
onClick={() => setShowAdvancedOptions(!showAdvancedOptions)}
>
<HStack>
<Text>Advanced Options</Text>
<Icon as={showAdvancedOptions ? FiChevronUp : FiChevronDown} />
</HStack>
</Button>
<Collapse in={showAdvancedOptions} unmountOnExit={true}>
<VStack align="stretch" pt={4}>
<HStack>
<Checkbox
colorScheme="blue"
isChecked={removeDuplicates}
onChange={(e) => setRemoveDuplicates(e.target.checked)}
>
<Text>Remove duplicates</Text>
</Checkbox>
<InfoCircle tooltipText="To avoid overfitting and speed up training, automatically deduplicate logs with matching input and output." />
</HStack>
</VStack>
</Collapse>
</VStack>
</VStack>
</ModalBody>
<ModalFooter>
<HStack>
<Button colorScheme="gray" onClick={disclosure.onClose} minW={24}>
Cancel
</Button>
<Button colorScheme="blue" onClick={exportData} isLoading={exportInProgress} minW={24}>
Download
</Button>
</HStack>
</ModalFooter>
</ModalContent>
</Modal>
);
};

View File

@@ -1,21 +0,0 @@
import { RiFlaskLine } from "react-icons/ri";
import { useAppStore } from "~/state/store";
import ActionButton from "../ActionButton";
const ExperimentButton = () => {
const selectedIds = useAppStore((s) => s.selectedDatasetEntries.selectedIds);
return (
<ActionButton
onClick={() => {
console.log("experimenting with these ids", selectedIds);
}}
label="Experiment"
icon={RiFlaskLine}
isDisabled={selectedIds.size === 0}
requireBeta
/>
);
};
export default ExperimentButton;

View File

@@ -1,148 +0,0 @@
import { useState, useEffect } from "react";
import { VStack, HStack, Button, Text, Progress, IconButton, Portal } from "@chakra-ui/react";
import { BsX } from "react-icons/bs";
import { type RouterOutputs, api } from "~/utils/api";
import { useDataset, useHandledAsyncCallback } from "~/utils/hooks";
import { formatFileSize } from "~/utils/utils";
type FileUpload = RouterOutputs["datasets"]["listFileUploads"][0];
const FileUploadsCard = () => {
const dataset = useDataset();
const [fileUploadsRefetchInterval, setFileUploadsRefetchInterval] = useState<number>(500);
const fileUploads = api.datasets.listFileUploads.useQuery(
{ datasetId: dataset.data?.id as string },
{ enabled: !!dataset.data?.id, refetchInterval: fileUploadsRefetchInterval },
);
useEffect(() => {
if (fileUploads?.data?.some((fu) => fu.status !== "COMPLETE" && fu.status !== "ERROR")) {
setFileUploadsRefetchInterval(500);
} else {
setFileUploadsRefetchInterval(15000);
}
}, [fileUploads]);
const utils = api.useContext();
const hideFileUploadsMutation = api.datasets.hideFileUploads.useMutation();
const [hideAllFileUploads] = useHandledAsyncCallback(async () => {
if (!fileUploads.data?.length) return;
await hideFileUploadsMutation.mutateAsync({
fileUploadIds: fileUploads.data.map((upload) => upload.id),
});
await utils.datasets.listFileUploads.invalidate();
}, [hideFileUploadsMutation, fileUploads.data, utils]);
if (!fileUploads.data?.length) return null;
return (
<Portal>
<VStack
w={72}
borderRadius={8}
position="fixed"
bottom={8}
right={8}
overflow="hidden"
borderWidth={1}
boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);"
minW={0}
bgColor="white"
>
<HStack p={4} w="full" bgColor="gray.200" justifyContent="space-between">
<Text fontWeight="bold">Uploads</Text>
<IconButton
aria-label="Close uploads"
as={BsX}
boxSize={6}
minW={0}
variant="ghost"
onClick={hideAllFileUploads}
cursor="pointer"
/>
</HStack>
{fileUploads?.data?.map((upload) => <FileUploadRow key={upload.id} fileUpload={upload} />)}
</VStack>
</Portal>
);
};
export default FileUploadsCard;
const FileUploadRow = ({ fileUpload }: { fileUpload: FileUpload }) => {
const { id, fileName, fileSize, progress, status, errorMessage } = fileUpload;
const utils = api.useContext();
const hideFileUploadsMutation = api.datasets.hideFileUploads.useMutation();
const [hideFileUpload, hidingInProgress] = useHandledAsyncCallback(async () => {
await hideFileUploadsMutation.mutateAsync({ fileUploadIds: [id] });
}, [id, hideFileUploadsMutation, utils]);
const [refreshDatasetEntries] = useHandledAsyncCallback(async () => {
await hideFileUploadsMutation.mutateAsync({ fileUploadIds: [id] });
await utils.datasets.listFileUploads.invalidate();
await utils.datasetEntries.list.invalidate();
}, [id, hideFileUploadsMutation, utils]);
return (
<VStack w="full" alignItems="flex-start" p={4} borderBottomWidth={1}>
<HStack w="full" justifyContent="space-between" alignItems="flex-start">
<VStack alignItems="flex-start" spacing={0}>
<Text fontWeight="bold">{fileName}</Text>
<Text fontSize="xs">({formatFileSize(fileSize, 2)})</Text>
</VStack>
<HStack spacing={0}>
{status === "COMPLETE" ? (
<Button variant="ghost" onClick={refreshDatasetEntries} color="orange.400" size="xs">
Refresh Table
</Button>
) : (
<IconButton
aria-label="Hide file upload"
as={BsX}
boxSize={6}
minW={0}
variant="ghost"
isLoading={hidingInProgress}
onClick={hideFileUpload}
cursor="pointer"
/>
)}
</HStack>
</HStack>
{errorMessage ? (
<Text alignSelf="center" pt={2}>
{errorMessage}
</Text>
) : (
<>
<Text alignSelf="center" fontSize="xs">
{getStatusText(status)}
</Text>
<Progress w="full" value={progress} borderRadius={2} />
</>
)}
</VStack>
);
};
const getStatusText = (status: FileUpload["status"]) => {
switch (status) {
case "PENDING":
return "Pending";
case "DOWNLOADING":
return "Downloading to Server";
case "PROCESSING":
return "Processing";
case "SAVING":
return "Saving";
case "COMPLETE":
return "Complete";
case "ERROR":
return "Error";
}
};
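The useEffect in FileUploadsCard above adapts the refetch rate to upload activity. The same decision as a small standalone sketch, for illustration only:

// Poll every 500ms while any upload is still in flight; back off to 15s once all are terminal.
type UploadStatus = "PENDING" | "DOWNLOADING" | "PROCESSING" | "SAVING" | "COMPLETE" | "ERROR";
const pickRefetchInterval = (statuses: UploadStatus[]): number =>
  statuses.some((s) => s !== "COMPLETE" && s !== "ERROR") ? 500 : 15000;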

View File

@@ -1,161 +0,0 @@
import { useState, useEffect } from "react";
import {
Modal,
ModalOverlay,
ModalContent,
ModalHeader,
ModalCloseButton,
ModalBody,
ModalFooter,
HStack,
VStack,
Icon,
Text,
Button,
useDisclosure,
type UseDisclosureReturn,
Input,
} from "@chakra-ui/react";
import { AiTwotoneThunderbolt } from "react-icons/ai";
import humanId from "human-id";
import { useRouter } from "next/router";
import { useDataset, useDatasetEntries, useHandledAsyncCallback } from "~/utils/hooks";
import { api } from "~/utils/api";
import ActionButton from "../ActionButton";
import InputDropdown from "../InputDropdown";
// import { FiChevronDown } from "react-icons/fi";
const SUPPORTED_BASE_MODELS = ["llama2-7b", "llama2-13b", "llama2-70b", "gpt-3.5-turbo"];
const FineTuneButton = () => {
const datasetEntries = useDatasetEntries().data;
const numEntries = datasetEntries?.matchingEntryIds.length || 0;
const disclosure = useDisclosure();
return (
<>
<ActionButton
onClick={disclosure.onOpen}
label="Fine Tune"
icon={AiTwotoneThunderbolt}
isDisabled={numEntries === 0}
requireBeta
/>
<FineTuneModal disclosure={disclosure} />
</>
);
};
export default FineTuneButton;
const FineTuneModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
const dataset = useDataset().data;
const datasetEntries = useDatasetEntries().data;
const [selectedBaseModel, setSelectedBaseModel] = useState(SUPPORTED_BASE_MODELS[0]);
const [modelSlug, setModelSlug] = useState(humanId({ separator: "-", capitalize: false }));
useEffect(() => {
if (disclosure.isOpen) {
setSelectedBaseModel(SUPPORTED_BASE_MODELS[0]);
setModelSlug(humanId({ separator: "-", capitalize: false }));
}
}, [disclosure.isOpen]);
const utils = api.useContext();
const router = useRouter();
const createFineTuneMutation = api.fineTunes.create.useMutation();
const [createFineTune, creationInProgress] = useHandledAsyncCallback(async () => {
if (!modelSlug || !selectedBaseModel || !dataset) return;
await createFineTuneMutation.mutateAsync({
slug: modelSlug,
baseModel: selectedBaseModel,
datasetId: dataset.id,
});
await utils.fineTunes.list.invalidate();
await router.push({ pathname: "/fine-tunes" });
disclosure.onClose();
}, [createFineTuneMutation, modelSlug, selectedBaseModel]);
return (
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader>
<HStack>
<Icon as={AiTwotoneThunderbolt} />
<Text>Fine Tune</Text>
</HStack>
</ModalHeader>
<ModalCloseButton />
<ModalBody maxW="unset">
<VStack w="full" spacing={8} pt={4} alignItems="flex-start">
<Text>
We'll train on <b>{datasetEntries?.trainingCount}</b> and test on{" "}
<b>{datasetEntries?.testingCount}</b> entries in this dataset.
</Text>
<VStack>
<HStack spacing={2} w="full">
<Text fontWeight="bold" w={36}>
Model ID:
</Text>
<Input
value={modelSlug}
onChange={(e) => setModelSlug(e.target.value)}
w={48}
placeholder="unique-id"
onKeyDown={(e) => {
// If the user types anything other than a-z, A-Z, or 0-9, replace it with -
if (!/[a-zA-Z0-9]/.test(e.key)) {
e.preventDefault();
setModelSlug((s) => s && `${s}-`);
}
}}
/>
</HStack>
<HStack spacing={2}>
<Text fontWeight="bold" w={36}>
Base model:
</Text>
<InputDropdown
options={SUPPORTED_BASE_MODELS}
selectedOption={selectedBaseModel}
onSelect={(option) => setSelectedBaseModel(option)}
inputGroupProps={{ w: 48 }}
/>
</HStack>
</VStack>
{/* <Button variant="unstyled" color="blue.600">
<HStack>
<Text>Advanced Options</Text>
<Icon as={FiChevronDown} />
</HStack>
</Button> */}
</VStack>
</ModalBody>
<ModalFooter>
<HStack>
<Button colorScheme="gray" onClick={disclosure.onClose} minW={24}>
Cancel
</Button>
<Button
colorScheme="blue"
onClick={createFineTune}
isLoading={creationInProgress}
minW={24}
isDisabled={!modelSlug}
>
Start Training
</Button>
</HStack>
</ModalFooter>
</ModalContent>
</Modal>
);
};
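The onKeyDown handler above keeps the model slug to a-z, A-Z, and 0-9, turning anything else into a dash as the user types. A whole-string version of the same rule as a hypothetical helper, not part of the codebase:

// Keep alphanumerics, collapse every other run of characters into a single dash, trim edge dashes.
const toModelSlug = (raw: string): string =>
  raw.replace(/[^a-zA-Z0-9]+/g, "-").replace(/^-+|-+$/g, "");

// toModelSlug("My Model v2!") === "My-Model-v2"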

View File

@@ -1,276 +0,0 @@
import { useState, useEffect, useRef, useCallback } from "react";
import {
Modal,
ModalOverlay,
ModalContent,
ModalHeader,
ModalCloseButton,
ModalBody,
ModalFooter,
HStack,
VStack,
Icon,
Text,
Button,
Box,
useDisclosure,
type UseDisclosureReturn,
} from "@chakra-ui/react";
import pluralize from "pluralize";
import { AiOutlineCloudUpload, AiOutlineFile } from "react-icons/ai";
import { useDataset, useHandledAsyncCallback } from "~/utils/hooks";
import { api } from "~/utils/api";
import ActionButton from "../ActionButton";
import { validateTrainingRows, type TrainingRow, parseJSONL } from "./validateTrainingRows";
import { uploadDatasetEntryFile } from "~/utils/azure/website";
import { formatFileSize } from "~/utils/utils";
const UploadDataButton = () => {
const disclosure = useDisclosure();
return (
<>
<ActionButton
onClick={disclosure.onOpen}
label="Upload Data"
icon={AiOutlineCloudUpload}
iconBoxSize={4}
requireBeta
/>
<UploadDataModal disclosure={disclosure} />
</>
);
};
export default UploadDataButton;
const UploadDataModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
const dataset = useDataset().data;
const [validationError, setValidationError] = useState<string | null>(null);
const [trainingRows, setTrainingRows] = useState<TrainingRow[] | null>(null);
const [file, setFile] = useState<File | null>(null);
const fileInputRef = useRef<HTMLInputElement>(null);
const handleFileDrop = (e: React.DragEvent<HTMLDivElement>) => {
e.preventDefault();
const files = e.dataTransfer.files;
if (files.length > 0) {
processFile(files[0] as File);
}
};
const handleFileChange = (e: React.ChangeEvent<HTMLInputElement>) => {
const files = e.target.files;
if (files && files.length > 0) {
processFile(files[0] as File);
}
};
const processFile = (file: File) => {
setFile(file);
// skip reading if file is larger than 10MB
if (file.size > 10000000) {
setTrainingRows(null);
return;
}
const reader = new FileReader();
reader.onload = (e: ProgressEvent<FileReader>) => {
const content = e.target?.result as string;
// Process the content, e.g., set to state
let parsedJSONL;
try {
parsedJSONL = parseJSONL(content) as TrainingRow[];
const validationError = validateTrainingRows(parsedJSONL);
if (validationError) {
setValidationError(validationError);
setTrainingRows(null);
return;
}
setTrainingRows(parsedJSONL);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} catch (e: any) {
setValidationError("Unable to parse JSONL file: " + (e.message as string));
setTrainingRows(null);
return;
}
};
reader.readAsText(file);
};
const resetState = useCallback(() => {
setValidationError(null);
setTrainingRows(null);
setFile(null);
}, [setValidationError, setTrainingRows, setFile]);
useEffect(() => {
if (disclosure.isOpen) {
resetState();
}
}, [disclosure.isOpen, resetState]);
const triggerFileDownloadMutation = api.datasets.triggerFileDownload.useMutation();
const utils = api.useContext();
const [sendJSONL, sendingInProgress] = useHandledAsyncCallback(async () => {
if (!dataset || !file) return;
const blobName = await uploadDatasetEntryFile(file);
await triggerFileDownloadMutation.mutateAsync({
datasetId: dataset.id,
blobName,
fileName: file.name,
fileSize: file.size,
});
await utils.datasets.listFileUploads.invalidate();
disclosure.onClose();
}, [dataset, trainingRows, triggerFileDownloadMutation, file, utils]);
return (
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader>
<HStack>
<Text>Upload Training Logs</Text>
</HStack>
</ModalHeader>
<ModalCloseButton />
<ModalBody maxW="unset" p={8}>
<Box w="full" aspectRatio={1.5}>
{validationError && (
<VStack w="full" h="full" justifyContent="center" spacing={8}>
<Icon as={AiOutlineFile} boxSize={24} color="gray.300" />
<VStack w="full">
<Text fontSize={32} color="gray.500" fontWeight="bold">
Error
</Text>
<Text color="gray.500">{validationError}</Text>
</VStack>
<Text
as="span"
textDecor="underline"
color="gray.500"
_hover={{ color: "orange.400" }}
cursor="pointer"
onClick={resetState}
>
Try again
</Text>
</VStack>
)}
{!validationError && !file && (
<VStack
w="full"
h="full"
stroke="gray.300"
justifyContent="center"
borderRadius={8}
sx={{
"background-image": `url("data:image/svg+xml,%3csvg width='100%25' height='100%25' xmlns='http://www.w3.org/2000/svg'%3e%3crect x='2%25' y='2%25' width='96%25' height='96%25' fill='none' stroke='%23eee' stroke-width='4' stroke-dasharray='6%2c 14' stroke-dashoffset='0' stroke-linecap='square' rx='8' ry='8'/%3e%3c/svg%3e")`,
}}
onDragOver={(e) => e.preventDefault()}
onDrop={handleFileDrop}
>
<JsonFileIcon />
<Icon as={AiOutlineCloudUpload} boxSize={24} color="gray.300" />
<Text fontSize={32} color="gray.500" fontWeight="bold">
Drag & Drop
</Text>
<Text color="gray.500">
your .jsonl file here, or{" "}
<input
type="file"
ref={fileInputRef}
onChange={handleFileChange}
style={{ display: "none" }}
accept=".jsonl"
/>
<Text
as="span"
textDecor="underline"
_hover={{ color: "orange.400" }}
cursor="pointer"
onClick={() => fileInputRef.current?.click()}
>
browse
</Text>
</Text>
</VStack>
)}
{!validationError && file && (
<VStack w="full" h="full" justifyContent="center" spacing={8}>
<JsonFileIcon />
<VStack w="full">
{trainingRows ? (
<>
<Text fontSize={32} color="gray.500" fontWeight="bold">
Success
</Text>
<Text color="gray.500">
We'll upload <b>{trainingRows.length}</b>{" "}
{pluralize("row", trainingRows.length)} into <b>{dataset?.name}</b>.{" "}
</Text>
</>
) : (
<>
<Text fontSize={32} color="gray.500" fontWeight="bold">
{file.name}
</Text>
<Text color="gray.500">{formatFileSize(file.size)}</Text>
</>
)}
</VStack>
<Text
as="span"
textDecor="underline"
color="gray.500"
_hover={{ color: "orange.400" }}
cursor="pointer"
onClick={resetState}
>
Change file
</Text>
</VStack>
)}
</Box>
</ModalBody>
<ModalFooter>
<HStack>
<Button colorScheme="gray" onClick={disclosure.onClose} minW={24}>
Cancel
</Button>
<Button
colorScheme="orange"
onClick={sendJSONL}
isLoading={sendingInProgress}
minW={24}
isDisabled={!file || !!validationError}
>
Upload
</Button>
</HStack>
</ModalFooter>
</ModalContent>
</Modal>
);
};
const JsonFileIcon = () => (
<Box position="relative" display="flex" alignItems="center" justifyContent="center">
<Icon as={AiOutlineFile} boxSize={24} color="gray.300" />
<Text position="absolute" color="orange.400" fontWeight="bold" fontSize={12} pt={4}>
JSONL
</Text>
</Box>
);
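For reference, a minimal sketch of what parseJSONL (imported from ./validateTrainingRows, not shown in this diff) is assumed to do; the real implementation may differ, and its output is further checked by validateTrainingRows.

// Hypothetical sketch: one JSON object per non-empty line. A malformed line throws, which
// processFile above catches and surfaces as a validation error.
const parseJSONLSketch = (content: string): unknown[] =>
  content
    .split("\n")
    .map((line) => line.trim())
    .filter((line) => line.length > 0)
    .map((line) => JSON.parse(line));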

Some files were not shown because too many files have changed in this diff.