Compare commits
47 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
db69b8e496 | ||
|
|
38e28fa30a | ||
|
|
b4cb931f6c | ||
|
|
40638a7848 | ||
|
|
14eae45d18 | ||
|
|
13bac46e0b | ||
|
|
12d01cd3d5 | ||
|
|
ec59252010 | ||
|
|
87e2339df2 | ||
|
|
75ad6619a5 | ||
|
|
4b8941d53a | ||
|
|
0d691d17cc | ||
|
|
815d4faad2 | ||
|
|
9632ccbc71 | ||
|
|
a4131e4a10 | ||
|
|
db1c8f171d | ||
|
|
678392ef17 | ||
|
|
af722128e8 | ||
|
|
50a79b6e3a | ||
|
|
f59150ff5b | ||
|
|
b58e0a8d54 | ||
|
|
dc82a3fa82 | ||
|
|
fedbf5784e | ||
|
|
888c04af50 | ||
|
|
1b36453051 | ||
|
|
2f37b3ed87 | ||
|
|
8fa7b691db | ||
|
|
17866a5249 | ||
|
|
947eba3216 | ||
|
|
ef1f9458f4 | ||
|
|
c6c7e746ee | ||
|
|
3be0a90960 | ||
|
|
9b1f2ac30a | ||
|
|
1b394cc72b | ||
|
|
26b9731bab | ||
|
|
7c8ec8f6a7 | ||
|
|
10dd53e7f6 | ||
|
|
b1802fc04b | ||
|
|
f2135ddc72 | ||
|
|
ca89eafb0b | ||
|
|
b50d47beaf | ||
|
|
733d53625b | ||
|
|
a5e59e4235 | ||
|
|
d0102e3202 | ||
|
|
bd571c4c4e | ||
|
|
296eb23d97 | ||
|
|
072dcee376 |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -3,3 +3,4 @@
|
||||
*.pyc
|
||||
node_modules/
|
||||
*.tsbuildinfo
|
||||
dist/
|
||||
16
README.md
16
README.md
@@ -1,10 +1,8 @@
|
||||
<!-- <img src="https://github.com/openpipe/openpipe/assets/41524992/ca59596e-eb80-40f9-921f-6d67f6e6d8fa" width="72px" /> -->
|
||||
|
||||
# OpenPipe
|
||||
|
||||
OpenPipe is a flexible playground for comparing and optimizing LLM prompts. It lets you quickly generate, test and compare candidate prompts, and can automatically [translate](#-translate-between-model-apis) those prompts between models.
|
||||
|
||||
<img src="https://github.com/openpipe/openpipe/assets/41524992/219a844e-3f4e-4f6b-8066-41348b42977b" alt="demo">
|
||||
<img src="https://github.com/openpipe/openpipe/assets/41524992/66bb1843-cb72-4130-a369-eec2df3b8201" alt="demo">
|
||||
|
||||
You can use our hosted version of OpenPipe at https://openpipe.ai. You can also clone this repository and [run it locally](#running-locally).
|
||||
|
||||
@@ -37,25 +35,19 @@ OpenPipe lets you _template_ a prompt. Use the templating feature to run the pro
|
||||
|
||||
Write your prompt in one format and automatically convert it to work with any other model.
|
||||
|
||||
<img width="480" alt="Screenshot 2023-08-01 at 11 55 38 PM" src="https://github.com/OpenPipe/OpenPipe/assets/41524992/1e19ccf2-96b6-4e93-a3a5-1449710d1b5b" alt="translate between models">
|
||||
|
||||
<br><br>
|
||||
<!-- <img width="480" alt="Screenshot 2023-08-01 at 11 55 38 PM" src="https://github.com/OpenPipe/OpenPipe/assets/41524992/1e19ccf2-96b6-4e93-a3a5-1449710d1b5b" alt="translate between models"> -->
|
||||
|
||||
### 🛠️ Refine Your Prompts Automatically
|
||||
|
||||
Use a growing database of best-practice refinements to improve your prompts automatically.
|
||||
|
||||
<img width="480" alt="Screenshot 2023-08-01 at 11 55 38 PM" src="https://github.com/OpenPipe/OpenPipe/assets/41524992/87a27fe7-daef-445c-a5e2-1c82b23f9f99" alt="add function call">
|
||||
|
||||
<br><br>
|
||||
<!-- <img width="480" alt="Screenshot 2023-08-01 at 11 55 38 PM" src="https://github.com/OpenPipe/OpenPipe/assets/41524992/87a27fe7-daef-445c-a5e2-1c82b23f9f99" alt="add function call"> -->
|
||||
|
||||
### 🪄 Auto-generate Test Scenarios
|
||||
|
||||
OpenPipe includes a tool to generate new test scenarios based on your existing prompts and scenarios. Just click "Autogenerate Scenario" to try it out!
|
||||
|
||||
<img width="600" src="https://github.com/openpipe/openpipe/assets/41524992/219a844e-3f4e-4f6b-8066-41348b42977b" alt="auto-generate">
|
||||
|
||||
<br><br>
|
||||
<!-- <img width="600" src="https://github.com/openpipe/openpipe/assets/41524992/219a844e-3f4e-4f6b-8066-41348b42977b" alt="auto-generate"> -->
|
||||
|
||||
## Running Locally
|
||||
|
||||
|
||||
5
app/@types/nextjs-routes.d.ts
vendored
5
app/@types/nextjs-routes.d.ts
vendored
@@ -19,10 +19,9 @@ declare module "nextjs-routes" {
|
||||
| DynamicRoute<"/api/v1/[...trpc]", { "trpc": string[] }>
|
||||
| StaticRoute<"/api/v1/openapi">
|
||||
| StaticRoute<"/dashboard">
|
||||
| DynamicRoute<"/data/[id]", { "id": string }>
|
||||
| StaticRoute<"/data">
|
||||
| DynamicRoute<"/experiments/[id]", { "id": string }>
|
||||
| DynamicRoute<"/experiments/[experimentSlug]", { "experimentSlug": string }>
|
||||
| StaticRoute<"/experiments">
|
||||
| StaticRoute<"/fine-tunes">
|
||||
| StaticRoute<"/">
|
||||
| DynamicRoute<"/invitations/[invitationToken]", { "invitationToken": string }>
|
||||
| StaticRoute<"/project/settings">
|
||||
|
||||
@@ -23,7 +23,6 @@ ARG NEXT_PUBLIC_SOCKET_URL
|
||||
ARG NEXT_PUBLIC_HOST
|
||||
ARG NEXT_PUBLIC_SENTRY_DSN
|
||||
ARG SENTRY_AUTH_TOKEN
|
||||
ARG NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS
|
||||
|
||||
WORKDIR /code
|
||||
COPY --from=deps /code/node_modules ./node_modules
|
||||
@@ -45,4 +44,4 @@ EXPOSE 3000
|
||||
ENV PORT 3000
|
||||
|
||||
# Run the "run-prod.sh" script
|
||||
CMD /code/app/run-prod.sh
|
||||
CMD /code/app/scripts/run-prod.sh
|
||||
@@ -12,8 +12,8 @@
|
||||
"build": "next build",
|
||||
"dev:next": "TZ=UTC next dev",
|
||||
"dev:wss": "pnpm tsx --watch src/wss-server.ts",
|
||||
"dev:worker": "NODE_ENV='development' pnpm tsx --watch src/server/tasks/worker.ts",
|
||||
"dev": "concurrently --kill-others 'pnpm dev:next' 'pnpm dev:wss' 'pnpm dev:worker'",
|
||||
"worker": "NODE_ENV='development' pnpm tsx --watch src/server/tasks/worker.ts",
|
||||
"dev": "concurrently --kill-others 'pnpm dev:next' 'pnpm dev:wss' 'pnpm worker --watch'",
|
||||
"postinstall": "prisma generate",
|
||||
"lint": "next lint",
|
||||
"start": "TZ=UTC next start",
|
||||
@@ -48,6 +48,7 @@
|
||||
"@trpc/react-query": "^10.26.0",
|
||||
"@trpc/server": "^10.26.0",
|
||||
"@vercel/og": "^0.5.9",
|
||||
"archiver": "^6.0.0",
|
||||
"ast-types": "^0.14.2",
|
||||
"chroma-js": "^2.4.2",
|
||||
"concurrently": "^8.2.0",
|
||||
@@ -60,6 +61,7 @@
|
||||
"framer-motion": "^10.12.17",
|
||||
"gpt-tokens": "^1.0.10",
|
||||
"graphile-worker": "^0.13.0",
|
||||
"human-id": "^4.0.0",
|
||||
"immer": "^10.0.2",
|
||||
"isolated-vm": "^4.5.0",
|
||||
"json-schema-to-typescript": "^13.0.2",
|
||||
@@ -98,6 +100,7 @@
|
||||
"replicate": "^0.12.3",
|
||||
"socket.io": "^4.7.1",
|
||||
"socket.io-client": "^4.7.1",
|
||||
"stream-buffers": "^3.0.2",
|
||||
"superjson": "1.12.2",
|
||||
"trpc-openapi": "^1.2.0",
|
||||
"tsx": "^3.12.7",
|
||||
@@ -110,6 +113,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@openapi-contrib/openapi-schema-to-json-schema": "^4.0.5",
|
||||
"@types/archiver": "^5.3.2",
|
||||
"@types/babel__core": "^7.20.1",
|
||||
"@types/babel__standalone": "^7.1.4",
|
||||
"@types/chroma-js": "^2.4.0",
|
||||
@@ -126,6 +130,7 @@
|
||||
"@types/react": "^18.2.6",
|
||||
"@types/react-dom": "^18.2.4",
|
||||
"@types/react-syntax-highlighter": "^15.5.7",
|
||||
"@types/stream-buffers": "^3.0.4",
|
||||
"@types/uuid": "^9.0.2",
|
||||
"@typescript-eslint/eslint-plugin": "^5.59.6",
|
||||
"@typescript-eslint/parser": "^5.59.6",
|
||||
|
||||
@@ -0,0 +1,88 @@
|
||||
/*
|
||||
* Copyright 2023 Viascom Ltd liab. Co
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
CREATE EXTENSION IF NOT EXISTS pgcrypto;
|
||||
|
||||
CREATE OR REPLACE FUNCTION nanoid(
|
||||
size int DEFAULT 21,
|
||||
alphabet text DEFAULT '_-0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
|
||||
)
|
||||
RETURNS text
|
||||
LANGUAGE plpgsql
|
||||
volatile
|
||||
AS
|
||||
$$
|
||||
DECLARE
|
||||
idBuilder text := '';
|
||||
counter int := 0;
|
||||
bytes bytea;
|
||||
alphabetIndex int;
|
||||
alphabetArray text[];
|
||||
alphabetLength int;
|
||||
mask int;
|
||||
step int;
|
||||
BEGIN
|
||||
alphabetArray := regexp_split_to_array(alphabet, '');
|
||||
alphabetLength := array_length(alphabetArray, 1);
|
||||
mask := (2 << cast(floor(log(alphabetLength - 1) / log(2)) as int)) - 1;
|
||||
step := cast(ceil(1.6 * mask * size / alphabetLength) AS int);
|
||||
|
||||
while true
|
||||
loop
|
||||
bytes := gen_random_bytes(step);
|
||||
while counter < step
|
||||
loop
|
||||
alphabetIndex := (get_byte(bytes, counter) & mask) + 1;
|
||||
if alphabetIndex <= alphabetLength then
|
||||
idBuilder := idBuilder || alphabetArray[alphabetIndex];
|
||||
if length(idBuilder) = size then
|
||||
return idBuilder;
|
||||
end if;
|
||||
end if;
|
||||
counter := counter + 1;
|
||||
end loop;
|
||||
|
||||
counter := 0;
|
||||
end loop;
|
||||
END
|
||||
$$;
|
||||
|
||||
|
||||
-- Make a short_nanoid function that uses the default alphabet and length of 15
|
||||
CREATE OR REPLACE FUNCTION short_nanoid()
|
||||
RETURNS text
|
||||
LANGUAGE plpgsql
|
||||
volatile
|
||||
AS
|
||||
$$
|
||||
BEGIN
|
||||
RETURN nanoid(15, '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ');
|
||||
END
|
||||
$$;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "Experiment" ADD COLUMN "slug" TEXT NOT NULL DEFAULT short_nanoid();
|
||||
|
||||
-- For existing experiments, keep the existing id as the slug for backwards compatibility
|
||||
UPDATE "Experiment" SET "slug" = "id";
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Experiment_slug_key" ON "Experiment"("slug");
|
||||
@@ -0,0 +1,48 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- You are about to drop the column `input` on the `DatasetEntry` table. All the data in the column will be lost.
|
||||
- You are about to drop the column `output` on the `DatasetEntry` table. All the data in the column will be lost.
|
||||
- Added the required column `loggedCallId` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.
|
||||
|
||||
*/
|
||||
-- AlterTable
|
||||
ALTER TABLE "DatasetEntry" DROP COLUMN "input",
|
||||
DROP COLUMN "output",
|
||||
ADD COLUMN "loggedCallId" UUID NOT NULL;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "DatasetEntry" ADD CONSTRAINT "DatasetEntry_loggedCallId_fkey" FOREIGN KEY ("loggedCallId") REFERENCES "LoggedCall"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "LoggedCallModelResponse" ALTER COLUMN "cost" SET DATA TYPE DOUBLE PRECISION;
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "FineTuneStatus" AS ENUM ('PENDING', 'TRAINING', 'AWAITING_DEPLOYMENT', 'DEPLOYING', 'DEPLOYED', 'ERROR');
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "FineTune" (
|
||||
"id" UUID NOT NULL,
|
||||
"slug" TEXT NOT NULL,
|
||||
"baseModel" TEXT NOT NULL,
|
||||
"status" "FineTuneStatus" NOT NULL DEFAULT 'PENDING',
|
||||
"trainingStartedAt" TIMESTAMP(3),
|
||||
"trainingFinishedAt" TIMESTAMP(3),
|
||||
"deploymentStartedAt" TIMESTAMP(3),
|
||||
"deploymentFinishedAt" TIMESTAMP(3),
|
||||
"datasetId" UUID NOT NULL,
|
||||
"projectId" UUID NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "FineTune_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "FineTune_slug_key" ON "FineTune"("slug");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "FineTune" ADD CONSTRAINT "FineTune_datasetId_fkey" FOREIGN KEY ("datasetId") REFERENCES "Dataset"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "FineTune" ADD CONSTRAINT "FineTune_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -11,7 +11,9 @@ datasource db {
|
||||
}
|
||||
|
||||
model Experiment {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
slug String @unique @default(dbgenerated("short_nanoid()"))
|
||||
label String
|
||||
|
||||
sortIndex Int @default(0)
|
||||
@@ -179,6 +181,7 @@ model Dataset {
|
||||
|
||||
name String
|
||||
datasetEntries DatasetEntry[]
|
||||
fineTunes FineTune[]
|
||||
|
||||
projectId String @db.Uuid
|
||||
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
@@ -190,8 +193,8 @@ model Dataset {
|
||||
model DatasetEntry {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
input String
|
||||
output String?
|
||||
loggedCallId String @db.Uuid
|
||||
loggedCall LoggedCall @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)
|
||||
|
||||
datasetId String @db.Uuid
|
||||
dataset Dataset? @relation(fields: [datasetId], references: [id], onDelete: Cascade)
|
||||
@@ -207,14 +210,15 @@ model Project {
|
||||
personalProjectUserId String? @unique @db.Uuid
|
||||
personalProjectUser User? @relation(fields: [personalProjectUserId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
projectUsers ProjectUser[]
|
||||
projectUserInvitations UserInvitation[]
|
||||
experiments Experiment[]
|
||||
datasets Dataset[]
|
||||
loggedCalls LoggedCall[]
|
||||
apiKeys ApiKey[]
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
projectUsers ProjectUser[]
|
||||
projectUserInvitations UserInvitation[]
|
||||
experiments Experiment[]
|
||||
datasets Dataset[]
|
||||
loggedCalls LoggedCall[]
|
||||
fineTunes FineTune[]
|
||||
apiKeys ApiKey[]
|
||||
}
|
||||
|
||||
enum ProjectUserRole {
|
||||
@@ -274,8 +278,9 @@ model LoggedCall {
|
||||
projectId String @db.Uuid
|
||||
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
|
||||
model String?
|
||||
tags LoggedCallTag[]
|
||||
model String?
|
||||
tags LoggedCallTag[]
|
||||
datasetEntries DatasetEntry[]
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
@@ -310,7 +315,7 @@ model LoggedCallModelResponse {
|
||||
outputTokens Int?
|
||||
finishReason String?
|
||||
completionId String?
|
||||
cost Decimal? @db.Decimal(18, 12)
|
||||
cost Float?
|
||||
|
||||
// The LoggedCall that created this LoggedCallModelResponse
|
||||
originalLoggedCallId String @unique @db.Uuid
|
||||
@@ -324,10 +329,10 @@ model LoggedCallModelResponse {
|
||||
}
|
||||
|
||||
model LoggedCallTag {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
name String
|
||||
value String?
|
||||
projectId String @db.Uuid
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
name String
|
||||
value String?
|
||||
projectId String @db.Uuid
|
||||
|
||||
loggedCallId String @db.Uuid
|
||||
loggedCall LoggedCall @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)
|
||||
@@ -391,12 +396,12 @@ model User {
|
||||
|
||||
role UserRole @default(USER)
|
||||
|
||||
accounts Account[]
|
||||
sessions Session[]
|
||||
projectUsers ProjectUser[]
|
||||
projects Project[]
|
||||
worldChampEntrant WorldChampEntrant?
|
||||
sentUserInvitations UserInvitation[]
|
||||
accounts Account[]
|
||||
sessions Session[]
|
||||
projectUsers ProjectUser[]
|
||||
projects Project[]
|
||||
worldChampEntrant WorldChampEntrant?
|
||||
sentUserInvitations UserInvitation[]
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @default(now()) @updatedAt
|
||||
@@ -405,17 +410,17 @@ model User {
|
||||
model UserInvitation {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
projectId String @db.Uuid
|
||||
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
email String
|
||||
role ProjectUserRole
|
||||
invitationToken String @unique
|
||||
senderId String @db.Uuid
|
||||
sender User @relation(fields: [senderId], references: [id], onDelete: Cascade)
|
||||
projectId String @db.Uuid
|
||||
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
email String
|
||||
role ProjectUserRole
|
||||
invitationToken String @unique
|
||||
senderId String @db.Uuid
|
||||
sender User @relation(fields: [senderId], references: [id], onDelete: Cascade)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@unique([projectId, email])
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model VerificationToken {
|
||||
@@ -425,3 +430,33 @@ model VerificationToken {
|
||||
|
||||
@@unique([identifier, token])
|
||||
}
|
||||
|
||||
enum FineTuneStatus {
|
||||
PENDING
|
||||
TRAINING
|
||||
AWAITING_DEPLOYMENT
|
||||
DEPLOYING
|
||||
DEPLOYED
|
||||
ERROR
|
||||
}
|
||||
|
||||
model FineTune {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
slug String @unique
|
||||
baseModel String
|
||||
status FineTuneStatus @default(PENDING)
|
||||
trainingStartedAt DateTime?
|
||||
trainingFinishedAt DateTime?
|
||||
deploymentStartedAt DateTime?
|
||||
deploymentFinishedAt DateTime?
|
||||
|
||||
datasetId String @db.Uuid
|
||||
dataset Dataset @relation(fields: [datasetId], references: [id], onDelete: Cascade)
|
||||
|
||||
projectId String @db.Uuid
|
||||
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
6
app/scripts/debug-prod.sh
Normal file
6
app/scripts/debug-prod.sh
Normal file
@@ -0,0 +1,6 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e
|
||||
cd "$(dirname "$0")/.."
|
||||
apt-get update
|
||||
apt-get install -y htop psql
|
||||
@@ -10,6 +10,4 @@ pnpm tsx src/promptConstructor/migrate.ts
|
||||
|
||||
echo "Starting the server"
|
||||
|
||||
pnpm concurrently --kill-others \
|
||||
"pnpm start" \
|
||||
"pnpm tsx src/server/tasks/worker.ts"
|
||||
pnpm start
|
||||
10
app/scripts/run-workers-prod.sh
Executable file
10
app/scripts/run-workers-prod.sh
Executable file
@@ -0,0 +1,10 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
echo "Migrating the database"
|
||||
pnpm prisma migrate deploy
|
||||
|
||||
echo "Starting 4 workers"
|
||||
|
||||
pnpm concurrently "pnpm worker" "pnpm worker" "pnpm worker" "pnpm worker"
|
||||
13
app/scripts/test-docker.sh
Executable file
13
app/scripts/test-docker.sh
Executable file
@@ -0,0 +1,13 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
cd "$(dirname "$0")/../.."
|
||||
|
||||
echo "Env is"
|
||||
echo $ENVIRONMENT
|
||||
|
||||
docker build . --file app/Dockerfile --tag "openpipe-prod"
|
||||
|
||||
# Run the image
|
||||
docker run --env-file app/.env -it --entrypoint "/bin/bash" "openpipe-prod"
|
||||
@@ -3,6 +3,7 @@
|
||||
// https://docs.sentry.io/platforms/javascript/guides/nextjs/
|
||||
|
||||
import * as Sentry from "@sentry/nextjs";
|
||||
import { isError } from "lodash-es";
|
||||
import { env } from "~/env.mjs";
|
||||
|
||||
if (env.NEXT_PUBLIC_SENTRY_DSN) {
|
||||
@@ -15,4 +16,10 @@ if (env.NEXT_PUBLIC_SENTRY_DSN) {
|
||||
// Setting this option to true will print useful information to the console while you're setting up Sentry.
|
||||
debug: false,
|
||||
});
|
||||
} else {
|
||||
// Install local debug exception handler for rejected promises
|
||||
process.on("unhandledRejection", (reason) => {
|
||||
const reasonDetails = isError(reason) ? reason?.stack : reason;
|
||||
console.log("Unhandled Rejection at:", reasonDetails);
|
||||
});
|
||||
}
|
||||
|
||||
14
app/src/components/InfoCircle.tsx
Normal file
14
app/src/components/InfoCircle.tsx
Normal file
@@ -0,0 +1,14 @@
|
||||
import { Tooltip, Icon, VStack } from "@chakra-ui/react";
|
||||
import { RiInformationFill } from "react-icons/ri";
|
||||
|
||||
const InfoCircle = ({ tooltipText }: { tooltipText: string }) => {
|
||||
return (
|
||||
<Tooltip label={tooltipText} fontSize="sm" shouldWrapChildren maxW={80}>
|
||||
<VStack>
|
||||
<Icon as={RiInformationFill} boxSize={5} color="gray.500" />
|
||||
</VStack>
|
||||
</Tooltip>
|
||||
);
|
||||
};
|
||||
|
||||
export default InfoCircle;
|
||||
@@ -11,6 +11,7 @@ import {
|
||||
Button,
|
||||
Text,
|
||||
useDisclosure,
|
||||
type InputGroupProps,
|
||||
} from "@chakra-ui/react";
|
||||
|
||||
import { FiChevronDown } from "react-icons/fi";
|
||||
@@ -20,15 +21,25 @@ type InputDropdownProps<T> = {
|
||||
options: ReadonlyArray<T>;
|
||||
selectedOption: T;
|
||||
onSelect: (option: T) => void;
|
||||
inputGroupProps?: InputGroupProps;
|
||||
};
|
||||
|
||||
const InputDropdown = <T,>({ options, selectedOption, onSelect }: InputDropdownProps<T>) => {
|
||||
const InputDropdown = <T,>({
|
||||
options,
|
||||
selectedOption,
|
||||
onSelect,
|
||||
inputGroupProps,
|
||||
}: InputDropdownProps<T>) => {
|
||||
const popover = useDisclosure();
|
||||
|
||||
return (
|
||||
<Popover placement="bottom-start" {...popover}>
|
||||
<PopoverTrigger>
|
||||
<InputGroup cursor="pointer" w={(selectedOption as string).length * 14 + 180}>
|
||||
<InputGroup
|
||||
cursor="pointer"
|
||||
w={(selectedOption as string).length * 14 + 180}
|
||||
{...inputGroupProps}
|
||||
>
|
||||
<Input
|
||||
value={selectedOption as string}
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function -- controlled input requires onChange
|
||||
|
||||
@@ -8,7 +8,7 @@ import {
|
||||
useHandledAsyncCallback,
|
||||
useVisibleScenarioIds,
|
||||
} from "~/utils/hooks";
|
||||
import { cellPadding } from "../constants";
|
||||
import { cellPadding } from "./constants";
|
||||
import { ActionButton } from "./ScenariosHeader";
|
||||
|
||||
export default function AddVariantButton() {
|
||||
|
||||
@@ -43,7 +43,7 @@ export default function OutputCell({
|
||||
|
||||
type OutputSchema = Parameters<typeof provider.normalizeOutput>[0];
|
||||
|
||||
const { mutateAsync: hardRefetchMutate } = api.scenarioVariantCells.forceRefetch.useMutation();
|
||||
const { mutateAsync: hardRefetchMutate } = api.scenarioVariantCells.hardRefetch.useMutation();
|
||||
const [hardRefetch, hardRefetching] = useHandledAsyncCallback(async () => {
|
||||
await hardRefetchMutate({ scenarioId: scenario.id, variantId: variant.id });
|
||||
await utils.scenarioVariantCells.get.invalidate({
|
||||
|
||||
@@ -16,7 +16,7 @@ import {
|
||||
VStack,
|
||||
} from "@chakra-ui/react";
|
||||
import { BsArrowsAngleExpand, BsX } from "react-icons/bs";
|
||||
import { cellPadding } from "../constants";
|
||||
import { cellPadding } from "./constants";
|
||||
import { FloatingLabelInput } from "./FloatingLabelInput";
|
||||
import { ScenarioEditorModal } from "./ScenarioEditorModal";
|
||||
|
||||
@@ -111,25 +111,23 @@ export default function ScenarioEditor({
|
||||
onDrop={onReorder}
|
||||
backgroundColor={isDragTarget ? "gray.100" : "transparent"}
|
||||
>
|
||||
{variableLabels.length === 0 ? (
|
||||
<Box color="gray.500">
|
||||
{vars.data ? "No scenario variables configured" : "Loading..."}
|
||||
</Box>
|
||||
) : (
|
||||
{
|
||||
<VStack spacing={4} flex={1} py={2}>
|
||||
<HStack justifyContent="space-between" w="100%" align="center" spacing={0}>
|
||||
<Text flex={1}>Scenario</Text>
|
||||
<Tooltip label="Expand" hasArrow>
|
||||
<IconButton
|
||||
aria-label="Expand"
|
||||
icon={<Icon as={BsArrowsAngleExpand} boxSize={3} />}
|
||||
onClick={() => setScenarioEditorModalOpen(true)}
|
||||
size="xs"
|
||||
colorScheme="gray"
|
||||
color="gray.500"
|
||||
variant="ghost"
|
||||
/>
|
||||
</Tooltip>
|
||||
{variableLabels.length && (
|
||||
<Tooltip label="Expand" hasArrow>
|
||||
<IconButton
|
||||
aria-label="Expand"
|
||||
icon={<Icon as={BsArrowsAngleExpand} boxSize={3} />}
|
||||
onClick={() => setScenarioEditorModalOpen(true)}
|
||||
size="xs"
|
||||
colorScheme="gray"
|
||||
color="gray.500"
|
||||
variant="ghost"
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
{canModify && props.canHide && (
|
||||
<Tooltip label="Delete" hasArrow>
|
||||
<IconButton
|
||||
@@ -150,31 +148,38 @@ export default function ScenarioEditor({
|
||||
</Tooltip>
|
||||
)}
|
||||
</HStack>
|
||||
{variableLabels.map((key) => {
|
||||
const value = values[key] ?? "";
|
||||
return (
|
||||
<FloatingLabelInput
|
||||
key={key}
|
||||
label={key}
|
||||
isDisabled={!canModify}
|
||||
style={{ width: "100%" }}
|
||||
maxHeight={32}
|
||||
value={value}
|
||||
onChange={(e) => {
|
||||
setValues((prev) => ({ ...prev, [key]: e.target.value }));
|
||||
}}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
|
||||
e.preventDefault();
|
||||
e.currentTarget.blur();
|
||||
onSave();
|
||||
}
|
||||
}}
|
||||
onMouseEnter={() => setVariableInputHovered(true)}
|
||||
onMouseLeave={() => setVariableInputHovered(false)}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
|
||||
{variableLabels.length === 0 ? (
|
||||
<Box color="gray.500">
|
||||
{vars.data ? "No scenario variables configured" : "Loading..."}
|
||||
</Box>
|
||||
) : (
|
||||
variableLabels.map((key) => {
|
||||
const value = values[key] ?? "";
|
||||
return (
|
||||
<FloatingLabelInput
|
||||
key={key}
|
||||
label={key}
|
||||
isDisabled={!canModify}
|
||||
style={{ width: "100%" }}
|
||||
maxHeight={32}
|
||||
value={value}
|
||||
onChange={(e) => {
|
||||
setValues((prev) => ({ ...prev, [key]: e.target.value }));
|
||||
}}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
|
||||
e.preventDefault();
|
||||
e.currentTarget.blur();
|
||||
onSave();
|
||||
}
|
||||
}}
|
||||
onMouseEnter={() => setVariableInputHovered(true)}
|
||||
onMouseLeave={() => setVariableInputHovered(false)}
|
||||
/>
|
||||
);
|
||||
})
|
||||
)}
|
||||
{hasChanged && (
|
||||
<HStack justify="right">
|
||||
<Button
|
||||
@@ -192,7 +197,7 @@ export default function ScenarioEditor({
|
||||
</HStack>
|
||||
)}
|
||||
</VStack>
|
||||
)}
|
||||
}
|
||||
</HStack>
|
||||
{scenarioEditorModalOpen && (
|
||||
<ScenarioEditorModal
|
||||
|
||||
@@ -65,11 +65,11 @@ export const ScenarioEditorModal = ({
|
||||
<Modal
|
||||
isOpen
|
||||
onClose={onClose}
|
||||
size={{ base: "xl", sm: "2xl", md: "3xl", lg: "5xl", xl: "7xl" }}
|
||||
size={{ base: "xl", sm: "2xl", md: "3xl", lg: "4xl", xl: "5xl" }}
|
||||
>
|
||||
<ModalOverlay />
|
||||
<ModalContent w={1200}>
|
||||
<ModalHeader />
|
||||
<ModalHeader>Edit Scenario</ModalHeader>
|
||||
<ModalCloseButton />
|
||||
<ModalBody maxW="unset">
|
||||
<VStack spacing={8}>
|
||||
|
||||
@@ -11,7 +11,7 @@ import {
|
||||
IconButton,
|
||||
Spinner,
|
||||
} from "@chakra-ui/react";
|
||||
import { cellPadding } from "../constants";
|
||||
import { cellPadding } from "./constants";
|
||||
import {
|
||||
useExperiment,
|
||||
useExperimentAccess,
|
||||
|
||||
@@ -110,7 +110,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
|
||||
setIsChanged(false);
|
||||
|
||||
await utils.promptVariants.list.invalidate();
|
||||
}, [checkForChanges]);
|
||||
}, [checkForChanges, replaceVariant.mutateAsync]);
|
||||
|
||||
useEffect(() => {
|
||||
if (monaco) {
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { useState, type DragEvent } from "react";
|
||||
import { type PromptVariant } from "../OutputsTable/types";
|
||||
import { type PromptVariant } from "../types";
|
||||
import { api } from "~/utils/api";
|
||||
import { RiDraggable } from "react-icons/ri";
|
||||
import { useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import { HStack, Icon, Text, GridItem, type GridItemProps } from "@chakra-ui/react"; // Changed here
|
||||
import { cellPadding, headerMinHeight } from "../constants";
|
||||
import AutoResizeTextArea from "../AutoResizeTextArea";
|
||||
import AutoResizeTextArea from "../../AutoResizeTextArea";
|
||||
import VariantHeaderMenuButton from "./VariantHeaderMenuButton";
|
||||
|
||||
export default function VariantHeader(
|
||||
@@ -75,7 +75,7 @@ export default function VariantHeader(
|
||||
padding={0}
|
||||
sx={{
|
||||
position: "sticky",
|
||||
top: "-2",
|
||||
top: "0",
|
||||
// Ensure that the menu always appears above the sticky header of other variants
|
||||
zIndex: menuOpen ? "dropdown" : 10,
|
||||
}}
|
||||
@@ -1,6 +1,4 @@
|
||||
import { type PromptVariant } from "../OutputsTable/types";
|
||||
import { api } from "~/utils/api";
|
||||
import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
|
||||
import { useState } from "react";
|
||||
import {
|
||||
Icon,
|
||||
Menu,
|
||||
@@ -14,10 +12,13 @@ import {
|
||||
} from "@chakra-ui/react";
|
||||
import { BsFillTrashFill, BsGear, BsStars } from "react-icons/bs";
|
||||
import { FaRegClone } from "react-icons/fa";
|
||||
import { useState } from "react";
|
||||
import { RefinePromptModal } from "../RefinePromptModal/RefinePromptModal";
|
||||
import { RiExchangeFundsFill } from "react-icons/ri";
|
||||
import { ChangeModelModal } from "../ChangeModelModal/ChangeModelModal";
|
||||
|
||||
import { api } from "~/utils/api";
|
||||
import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
|
||||
import { type PromptVariant } from "../types";
|
||||
import { RefinePromptModal } from "../../RefinePromptModal/RefinePromptModal";
|
||||
import { ChangeModelModal } from "../../ChangeModelModal/ChangeModelModal";
|
||||
|
||||
export default function VariantHeaderMenuButton({
|
||||
variant,
|
||||
@@ -1,6 +1,6 @@
|
||||
import { HStack, Icon, Text, useToken } from "@chakra-ui/react";
|
||||
import { type PromptVariant } from "./types";
|
||||
import { cellPadding } from "../constants";
|
||||
import { cellPadding } from "./constants";
|
||||
import { api } from "~/utils/api";
|
||||
import chroma from "chroma-js";
|
||||
import { BsCurrencyDollar } from "react-icons/bs";
|
||||
|
||||
@@ -3,13 +3,14 @@ import { api } from "~/utils/api";
|
||||
import AddVariantButton from "./AddVariantButton";
|
||||
import ScenarioRow from "./ScenarioRow";
|
||||
import VariantEditor from "./VariantEditor";
|
||||
import VariantHeader from "../VariantHeader/VariantHeader";
|
||||
import VariantHeader from "./VariantHeader/VariantHeader";
|
||||
import VariantStats from "./VariantStats";
|
||||
import { ScenariosHeader } from "./ScenariosHeader";
|
||||
import { borders } from "./styles";
|
||||
import { useScenarios } from "~/utils/hooks";
|
||||
import ScenarioPaginator from "./ScenarioPaginator";
|
||||
import { Fragment } from "react";
|
||||
import useScrolledPast from "./useHasScrolledPast";
|
||||
|
||||
export default function OutputsTable({ experimentId }: { experimentId: string | undefined }) {
|
||||
const variants = api.promptVariants.list.useQuery(
|
||||
@@ -18,6 +19,7 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
|
||||
);
|
||||
|
||||
const scenarios = useScenarios();
|
||||
const shouldFlattenHeader = useScrolledPast(50);
|
||||
|
||||
if (!variants.data || !scenarios.data) return null;
|
||||
|
||||
@@ -63,8 +65,8 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
|
||||
variant={variant}
|
||||
canHide={variants.data.length > 1}
|
||||
rowStart={1}
|
||||
borderTopLeftRadius={isFirst ? 8 : 0}
|
||||
borderTopRightRadius={isLast ? 8 : 0}
|
||||
borderTopLeftRadius={isFirst && !shouldFlattenHeader ? 8 : 0}
|
||||
borderTopRightRadius={isLast && !shouldFlattenHeader ? 8 : 0}
|
||||
{...sharedProps}
|
||||
/>
|
||||
<GridItem rowStart={2} {...sharedProps}>
|
||||
@@ -75,6 +77,7 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
|
||||
{...sharedProps}
|
||||
borderBottomLeftRadius={isFirst ? 8 : 0}
|
||||
borderBottomRightRadius={isLast ? 8 : 0}
|
||||
boxShadow="5px 5px 15px 1px rgba(0, 0, 0, 0.1);"
|
||||
>
|
||||
<VariantStats variant={variant} />
|
||||
</GridItem>
|
||||
|
||||
34
app/src/components/OutputsTable/useHasScrolledPast.tsx
Normal file
34
app/src/components/OutputsTable/useHasScrolledPast.tsx
Normal file
@@ -0,0 +1,34 @@
|
||||
import { useState, useEffect } from "react";
|
||||
|
||||
const useScrolledPast = (scrollThreshold: number) => {
|
||||
const [hasScrolledPast, setHasScrolledPast] = useState(true);
|
||||
|
||||
useEffect(() => {
|
||||
const container = document.getElementById("output-container");
|
||||
|
||||
if (!container) {
|
||||
console.warn('Element with id "outputs-container" not found.');
|
||||
return;
|
||||
}
|
||||
|
||||
const checkScroll = () => {
|
||||
const { scrollTop } = container;
|
||||
|
||||
// Check if scrollTop is greater than or equal to scrollThreshold
|
||||
setHasScrolledPast(scrollTop > scrollThreshold);
|
||||
};
|
||||
|
||||
checkScroll();
|
||||
|
||||
container.addEventListener("scroll", checkScroll);
|
||||
|
||||
// Cleanup
|
||||
return () => {
|
||||
container.removeEventListener("scroll", checkScroll);
|
||||
};
|
||||
}, []);
|
||||
|
||||
return hasScrolledPast;
|
||||
};
|
||||
|
||||
export default useScrolledPast;
|
||||
@@ -1,15 +1,19 @@
|
||||
import { HStack, IconButton, Text, Select, type StackProps, Icon } from "@chakra-ui/react";
|
||||
import {
|
||||
HStack,
|
||||
IconButton,
|
||||
Text,
|
||||
Select,
|
||||
type StackProps,
|
||||
Icon,
|
||||
useBreakpointValue,
|
||||
} from "@chakra-ui/react";
|
||||
import React, { useCallback } from "react";
|
||||
import { FiChevronsLeft, FiChevronsRight, FiChevronLeft, FiChevronRight } from "react-icons/fi";
|
||||
import { usePageParams } from "~/utils/hooks";
|
||||
|
||||
const pageSizeOptions = [10, 25, 50, 100];
|
||||
|
||||
const Paginator = ({
|
||||
count,
|
||||
condense,
|
||||
...props
|
||||
}: { count: number; condense?: boolean } & StackProps) => {
|
||||
const Paginator = ({ count, ...props }: { count: number; condense?: boolean } & StackProps) => {
|
||||
const { page, pageSize, setPageParams } = usePageParams();
|
||||
|
||||
const lastPage = Math.ceil(count / pageSize);
|
||||
@@ -37,6 +41,9 @@ const Paginator = ({
|
||||
const goToLastPage = () => setPageParams({ page: lastPage }, "replace");
|
||||
const goToFirstPage = () => setPageParams({ page: 1 }, "replace");
|
||||
|
||||
const isMobile = useBreakpointValue({ base: true, md: false });
|
||||
const condense = isMobile || props.condense;
|
||||
|
||||
if (count === 0) return null;
|
||||
|
||||
return (
|
||||
|
||||
@@ -1,112 +0,0 @@
|
||||
import {
|
||||
HStack,
|
||||
Icon,
|
||||
VStack,
|
||||
Text,
|
||||
Divider,
|
||||
Spinner,
|
||||
AspectRatio,
|
||||
SkeletonText,
|
||||
} from "@chakra-ui/react";
|
||||
import { RiDatabase2Line } from "react-icons/ri";
|
||||
import { formatTimePast } from "~/utils/dayjs";
|
||||
import Link from "next/link";
|
||||
import { useRouter } from "next/router";
|
||||
import { BsPlusSquare } from "react-icons/bs";
|
||||
import { api } from "~/utils/api";
|
||||
import { useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import { useAppStore } from "~/state/store";
|
||||
|
||||
type DatasetData = {
|
||||
name: string;
|
||||
numEntries: number;
|
||||
id: string;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
};
|
||||
|
||||
export const DatasetCard = ({ dataset }: { dataset: DatasetData }) => {
|
||||
return (
|
||||
<AspectRatio ratio={1.2} w="full">
|
||||
<VStack
|
||||
as={Link}
|
||||
href={{ pathname: "/data/[id]", query: { id: dataset.id } }}
|
||||
bg="gray.50"
|
||||
_hover={{ bg: "gray.100" }}
|
||||
transition="background 0.2s"
|
||||
cursor="pointer"
|
||||
borderColor="gray.200"
|
||||
borderWidth={1}
|
||||
p={4}
|
||||
justify="space-between"
|
||||
>
|
||||
<HStack w="full" color="gray.700" justify="center">
|
||||
<Icon as={RiDatabase2Line} boxSize={4} />
|
||||
<Text fontWeight="bold">{dataset.name}</Text>
|
||||
</HStack>
|
||||
<HStack h="full" spacing={4} flex={1} align="center">
|
||||
<CountLabel label="Rows" count={dataset.numEntries} />
|
||||
</HStack>
|
||||
<HStack w="full" color="gray.500" fontSize="xs" textAlign="center">
|
||||
<Text flex={1}>Created {formatTimePast(dataset.createdAt)}</Text>
|
||||
<Divider h={4} orientation="vertical" />
|
||||
<Text flex={1}>Updated {formatTimePast(dataset.updatedAt)}</Text>
|
||||
</HStack>
|
||||
</VStack>
|
||||
</AspectRatio>
|
||||
);
|
||||
};
|
||||
|
||||
const CountLabel = ({ label, count }: { label: string; count: number }) => {
|
||||
return (
|
||||
<VStack alignItems="center" flex={1}>
|
||||
<Text color="gray.500" fontWeight="bold">
|
||||
{label}
|
||||
</Text>
|
||||
<Text fontSize="sm" color="gray.500">
|
||||
{count}
|
||||
</Text>
|
||||
</VStack>
|
||||
);
|
||||
};
|
||||
|
||||
export const NewDatasetCard = () => {
|
||||
const router = useRouter();
|
||||
const selectedProjectId = useAppStore((s) => s.selectedProjectId);
|
||||
const createMutation = api.datasets.create.useMutation();
|
||||
const [createDataset, isLoading] = useHandledAsyncCallback(async () => {
|
||||
const newDataset = await createMutation.mutateAsync({ projectId: selectedProjectId ?? "" });
|
||||
await router.push({ pathname: "/data/[id]", query: { id: newDataset.id } });
|
||||
}, [createMutation, router, selectedProjectId]);
|
||||
|
||||
return (
|
||||
<AspectRatio ratio={1.2} w="full">
|
||||
<VStack
|
||||
align="center"
|
||||
justify="center"
|
||||
_hover={{ cursor: "pointer", bg: "gray.50" }}
|
||||
transition="background 0.2s"
|
||||
cursor="pointer"
|
||||
borderColor="gray.200"
|
||||
borderWidth={1}
|
||||
p={4}
|
||||
onClick={createDataset}
|
||||
>
|
||||
<Icon as={isLoading ? Spinner : BsPlusSquare} boxSize={8} />
|
||||
<Text display={{ base: "none", md: "block" }} ml={2}>
|
||||
New Dataset
|
||||
</Text>
|
||||
</VStack>
|
||||
</AspectRatio>
|
||||
);
|
||||
};
|
||||
|
||||
export const DatasetCardSkeleton = () => (
|
||||
<AspectRatio ratio={1.2} w="full">
|
||||
<VStack align="center" borderColor="gray.200" borderWidth={1} p={4} bg="gray.50">
|
||||
<SkeletonText noOfLines={1} w="80%" />
|
||||
<SkeletonText noOfLines={2} w="60%" />
|
||||
<SkeletonText noOfLines={1} w="80%" />
|
||||
</VStack>
|
||||
</AspectRatio>
|
||||
);
|
||||
@@ -1,16 +0,0 @@
|
||||
import { type StackProps } from "@chakra-ui/react";
|
||||
|
||||
import { useDatasetEntries } from "~/utils/hooks";
|
||||
import Paginator from "../Paginator";
|
||||
|
||||
const DatasetEntriesPaginator = (props: StackProps) => {
|
||||
const { data } = useDatasetEntries();
|
||||
|
||||
if (!data) return null;
|
||||
|
||||
const { count } = data;
|
||||
|
||||
return <Paginator count={count} {...props} />;
|
||||
};
|
||||
|
||||
export default DatasetEntriesPaginator;
|
||||
@@ -1,31 +0,0 @@
|
||||
import { type StackProps, VStack, Table, Th, Tr, Thead, Tbody, Text } from "@chakra-ui/react";
|
||||
import { useDatasetEntries } from "~/utils/hooks";
|
||||
import TableRow from "./TableRow";
|
||||
import DatasetEntriesPaginator from "./DatasetEntriesPaginator";
|
||||
|
||||
const DatasetEntriesTable = (props: StackProps) => {
|
||||
const { data } = useDatasetEntries();
|
||||
|
||||
return (
|
||||
<VStack justifyContent="space-between" {...props}>
|
||||
<Table variant="simple" sx={{ "table-layout": "fixed", width: "full" }}>
|
||||
<Thead>
|
||||
<Tr>
|
||||
<Th>Input</Th>
|
||||
<Th>Output</Th>
|
||||
</Tr>
|
||||
</Thead>
|
||||
<Tbody>{data?.entries.map((entry) => <TableRow key={entry.id} entry={entry} />)}</Tbody>
|
||||
</Table>
|
||||
{(!data || data.entries.length) === 0 ? (
|
||||
<Text alignSelf="flex-start" pl={6} color="gray.500">
|
||||
No entries found
|
||||
</Text>
|
||||
) : (
|
||||
<DatasetEntriesPaginator />
|
||||
)}
|
||||
</VStack>
|
||||
);
|
||||
};
|
||||
|
||||
export default DatasetEntriesTable;
|
||||
@@ -1,26 +0,0 @@
|
||||
import { Button, HStack, useDisclosure } from "@chakra-ui/react";
|
||||
import { BiImport } from "react-icons/bi";
|
||||
import { BsStars } from "react-icons/bs";
|
||||
|
||||
import { GenerateDataModal } from "./GenerateDataModal";
|
||||
|
||||
export const DatasetHeaderButtons = () => {
|
||||
const generateModalDisclosure = useDisclosure();
|
||||
|
||||
return (
|
||||
<>
|
||||
<HStack>
|
||||
<Button leftIcon={<BiImport />} colorScheme="blue" variant="ghost">
|
||||
Import Data
|
||||
</Button>
|
||||
<Button leftIcon={<BsStars />} colorScheme="blue" onClick={generateModalDisclosure.onOpen}>
|
||||
Generate Data
|
||||
</Button>
|
||||
</HStack>
|
||||
<GenerateDataModal
|
||||
isOpen={generateModalDisclosure.isOpen}
|
||||
onClose={generateModalDisclosure.onClose}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -1,128 +0,0 @@
|
||||
import {
|
||||
Modal,
|
||||
ModalBody,
|
||||
ModalCloseButton,
|
||||
ModalContent,
|
||||
ModalHeader,
|
||||
ModalOverlay,
|
||||
ModalFooter,
|
||||
Text,
|
||||
HStack,
|
||||
VStack,
|
||||
Icon,
|
||||
NumberInput,
|
||||
NumberInputField,
|
||||
NumberInputStepper,
|
||||
NumberIncrementStepper,
|
||||
NumberDecrementStepper,
|
||||
Button,
|
||||
} from "@chakra-ui/react";
|
||||
import { BsStars } from "react-icons/bs";
|
||||
import { useState } from "react";
|
||||
import { useDataset, useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import { api } from "~/utils/api";
|
||||
import AutoResizeTextArea from "~/components/AutoResizeTextArea";
|
||||
|
||||
export const GenerateDataModal = ({
|
||||
isOpen,
|
||||
onClose,
|
||||
}: {
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
}) => {
|
||||
const utils = api.useContext();
|
||||
|
||||
const datasetId = useDataset().data?.id;
|
||||
|
||||
const [numToGenerate, setNumToGenerate] = useState<number>(20);
|
||||
const [inputDescription, setInputDescription] = useState<string>(
|
||||
"Each input should contain an email body. Half of the emails should contain event details, and the other half should not.",
|
||||
);
|
||||
const [outputDescription, setOutputDescription] = useState<string>(
|
||||
`Each output should contain "true" or "false", where "true" indicates that the email contains event details.`,
|
||||
);
|
||||
|
||||
const generateEntriesMutation = api.datasetEntries.autogenerateEntries.useMutation();
|
||||
|
||||
const [generateEntries, generateEntriesInProgress] = useHandledAsyncCallback(async () => {
|
||||
if (!inputDescription || !outputDescription || !numToGenerate || !datasetId) return;
|
||||
await generateEntriesMutation.mutateAsync({
|
||||
datasetId,
|
||||
inputDescription,
|
||||
outputDescription,
|
||||
numToGenerate,
|
||||
});
|
||||
await utils.datasetEntries.list.invalidate();
|
||||
onClose();
|
||||
}, [
|
||||
generateEntriesMutation,
|
||||
onClose,
|
||||
inputDescription,
|
||||
outputDescription,
|
||||
numToGenerate,
|
||||
datasetId,
|
||||
]);
|
||||
|
||||
return (
|
||||
<Modal isOpen={isOpen} onClose={onClose} size={{ base: "xl", sm: "2xl", md: "3xl" }}>
|
||||
<ModalOverlay />
|
||||
<ModalContent w={1200}>
|
||||
<ModalHeader>
|
||||
<HStack>
|
||||
<Icon as={BsStars} />
|
||||
<Text>Generate Data</Text>
|
||||
</HStack>
|
||||
</ModalHeader>
|
||||
<ModalCloseButton />
|
||||
<ModalBody maxW="unset">
|
||||
<VStack w="full" spacing={8} padding={8} alignItems="flex-start">
|
||||
<VStack alignItems="flex-start" spacing={2}>
|
||||
<Text fontWeight="bold">Number of Rows:</Text>
|
||||
<NumberInput
|
||||
step={5}
|
||||
defaultValue={15}
|
||||
min={0}
|
||||
max={100}
|
||||
onChange={(valueString) => setNumToGenerate(parseInt(valueString) || 0)}
|
||||
value={numToGenerate}
|
||||
w="24"
|
||||
>
|
||||
<NumberInputField />
|
||||
<NumberInputStepper>
|
||||
<NumberIncrementStepper />
|
||||
<NumberDecrementStepper />
|
||||
</NumberInputStepper>
|
||||
</NumberInput>
|
||||
</VStack>
|
||||
<VStack alignItems="flex-start" w="full" spacing={2}>
|
||||
<Text fontWeight="bold">Input Description:</Text>
|
||||
<AutoResizeTextArea
|
||||
value={inputDescription}
|
||||
onChange={(e) => setInputDescription(e.target.value)}
|
||||
placeholder="Each input should contain..."
|
||||
/>
|
||||
</VStack>
|
||||
<VStack alignItems="flex-start" w="full" spacing={2}>
|
||||
<Text fontWeight="bold">Output Description (optional):</Text>
|
||||
<AutoResizeTextArea
|
||||
value={outputDescription}
|
||||
onChange={(e) => setOutputDescription(e.target.value)}
|
||||
placeholder="The output should contain..."
|
||||
/>
|
||||
</VStack>
|
||||
</VStack>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<Button
|
||||
colorScheme="blue"
|
||||
isLoading={generateEntriesInProgress}
|
||||
isDisabled={!numToGenerate || !inputDescription || !outputDescription}
|
||||
onClick={generateEntries}
|
||||
>
|
||||
Generate
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
@@ -1,13 +0,0 @@
|
||||
import { Td, Tr } from "@chakra-ui/react";
|
||||
import { type DatasetEntry } from "@prisma/client";
|
||||
|
||||
const TableRow = ({ entry }: { entry: DatasetEntry }) => {
|
||||
return (
|
||||
<Tr key={entry.id}>
|
||||
<Td>{entry.input}</Td>
|
||||
<Td>{entry.output}</Td>
|
||||
</Tr>
|
||||
);
|
||||
};
|
||||
|
||||
export default TableRow;
|
||||
@@ -14,21 +14,11 @@ import { formatTimePast } from "~/utils/dayjs";
|
||||
import Link from "next/link";
|
||||
import { useRouter } from "next/router";
|
||||
import { BsPlusSquare } from "react-icons/bs";
|
||||
import { api } from "~/utils/api";
|
||||
import { RouterOutputs, api } from "~/utils/api";
|
||||
import { useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import { useAppStore } from "~/state/store";
|
||||
|
||||
type ExperimentData = {
|
||||
testScenarioCount: number;
|
||||
promptVariantCount: number;
|
||||
id: string;
|
||||
label: string;
|
||||
sortIndex: number;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
};
|
||||
|
||||
export const ExperimentCard = ({ exp }: { exp: ExperimentData }) => {
|
||||
export const ExperimentCard = ({ exp }: { exp: RouterOutputs["experiments"]["list"][0] }) => {
|
||||
return (
|
||||
<Card
|
||||
w="full"
|
||||
@@ -45,7 +35,7 @@ export const ExperimentCard = ({ exp }: { exp: ExperimentData }) => {
|
||||
as={Link}
|
||||
w="full"
|
||||
h="full"
|
||||
href={{ pathname: "/experiments/[id]", query: { id: exp.id } }}
|
||||
href={{ pathname: "/experiments/[experimentSlug]", query: { experimentSlug: exp.slug } }}
|
||||
justify="space-between"
|
||||
>
|
||||
<HStack w="full" color="gray.700" justify="center">
|
||||
@@ -89,8 +79,8 @@ export const NewExperimentCard = () => {
|
||||
projectId: selectedProjectId ?? "",
|
||||
});
|
||||
await router.push({
|
||||
pathname: "/experiments/[id]",
|
||||
query: { id: newExperiment.id },
|
||||
pathname: "/experiments/[experimentSlug]",
|
||||
query: { experimentSlug: newExperiment.slug },
|
||||
});
|
||||
}, [createMutation, router, selectedProjectId]);
|
||||
|
||||
|
||||
@@ -16,11 +16,14 @@ export const useOnForkButtonPressed = () => {
|
||||
|
||||
const [onFork, isForking] = useHandledAsyncCallback(async () => {
|
||||
if (!experiment.data?.id || !selectedProjectId) return;
|
||||
const forkedExperimentId = await forkMutation.mutateAsync({
|
||||
const newExperiment = await forkMutation.mutateAsync({
|
||||
id: experiment.data.id,
|
||||
projectId: selectedProjectId,
|
||||
});
|
||||
await router.push({ pathname: "/experiments/[id]", query: { id: forkedExperimentId } });
|
||||
await router.push({
|
||||
pathname: "/experiments/[experimentSlug]",
|
||||
query: { experimentSlug: newExperiment.slug },
|
||||
});
|
||||
}, [forkMutation, experiment.data?.id, router]);
|
||||
|
||||
const onForkButtonPressed = useCallback(() => {
|
||||
|
||||
65
app/src/components/fineTunes/FineTunesTable.tsx
Normal file
65
app/src/components/fineTunes/FineTunesTable.tsx
Normal file
@@ -0,0 +1,65 @@
|
||||
import { Card, Table, Thead, Tr, Th, Tbody, Td, VStack, Icon, Text } from "@chakra-ui/react";
|
||||
import { FaTable } from "react-icons/fa";
|
||||
import { type FineTuneStatus } from "@prisma/client";
|
||||
|
||||
import dayjs from "~/utils/dayjs";
|
||||
import { useFineTunes } from "~/utils/hooks";
|
||||
|
||||
const FineTunesTable = ({}) => {
|
||||
const { data } = useFineTunes();
|
||||
|
||||
const fineTunes = data?.fineTunes || [];
|
||||
|
||||
return (
|
||||
<Card width="100%" overflowX="auto">
|
||||
{fineTunes.length ? (
|
||||
<Table>
|
||||
<Thead>
|
||||
<Tr>
|
||||
<Th>ID</Th>
|
||||
<Th>Created At</Th>
|
||||
<Th>Base Model</Th>
|
||||
<Th>Dataset Size</Th>
|
||||
<Th>Status</Th>
|
||||
</Tr>
|
||||
</Thead>
|
||||
<Tbody>
|
||||
{fineTunes.map((fineTune) => {
|
||||
return (
|
||||
<Tr key={fineTune.id}>
|
||||
<Td>{fineTune.slug}</Td>
|
||||
<Td>{dayjs(fineTune.createdAt).format("MMMM D h:mm A")}</Td>
|
||||
<Td>{fineTune.baseModel}</Td>
|
||||
<Td>{fineTune.dataset._count.datasetEntries}</Td>
|
||||
<Td fontSize="sm" fontWeight="bold">
|
||||
<Text color={getStatusColor(fineTune.status)}>{fineTune.status}</Text>
|
||||
</Td>
|
||||
</Tr>
|
||||
);
|
||||
})}
|
||||
</Tbody>
|
||||
</Table>
|
||||
) : (
|
||||
<VStack py={8}>
|
||||
<Icon as={FaTable} boxSize={16} color="gray.300" />
|
||||
<Text color="gray.400" fontSize="lg" fontWeight="bold">
|
||||
No Fine Tunes Found
|
||||
</Text>
|
||||
</VStack>
|
||||
)}
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
export default FineTunesTable;
|
||||
|
||||
const getStatusColor = (status: FineTuneStatus) => {
|
||||
switch (status) {
|
||||
case "DEPLOYED":
|
||||
return "green.500";
|
||||
case "ERROR":
|
||||
return "red.500";
|
||||
default:
|
||||
return "yellow.500";
|
||||
}
|
||||
};
|
||||
@@ -15,12 +15,14 @@ import Head from "next/head";
|
||||
import Link from "next/link";
|
||||
import { BsGearFill, BsGithub, BsPersonCircle } from "react-icons/bs";
|
||||
import { IoStatsChartOutline } from "react-icons/io5";
|
||||
import { RiHome3Line, RiDatabase2Line, RiFlaskLine } from "react-icons/ri";
|
||||
import { RiHome3Line, RiFlaskLine } from "react-icons/ri";
|
||||
import { FaRobot } from "react-icons/fa";
|
||||
import { signIn, useSession } from "next-auth/react";
|
||||
import { env } from "~/env.mjs";
|
||||
import ProjectMenu from "./ProjectMenu";
|
||||
import NavSidebarOption from "./NavSidebarOption";
|
||||
import IconLink from "./IconLink";
|
||||
import { BetaModal } from "./BetaModal";
|
||||
import { useAppStore } from "~/state/store";
|
||||
|
||||
const Divider = () => <Box h="1px" bgColor="gray.300" w="full" />;
|
||||
|
||||
@@ -71,21 +73,10 @@ const NavSidebar = () => {
|
||||
<ProjectMenu />
|
||||
<Divider />
|
||||
|
||||
{env.NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS && (
|
||||
<>
|
||||
<IconLink icon={RiHome3Line} label="Dashboard" href="/dashboard" beta />
|
||||
<IconLink
|
||||
icon={IoStatsChartOutline}
|
||||
label="Request Logs"
|
||||
href="/request-logs"
|
||||
beta
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
<IconLink icon={RiHome3Line} label="Dashboard" href="/dashboard" beta />
|
||||
<IconLink icon={IoStatsChartOutline} label="Request Logs" href="/request-logs" beta />
|
||||
<IconLink icon={FaRobot} label="Fine Tunes" href="/fine-tunes" beta />
|
||||
<IconLink icon={RiFlaskLine} label="Experiments" href="/experiments" />
|
||||
{env.NEXT_PUBLIC_SHOW_DATA && (
|
||||
<IconLink icon={RiDatabase2Line} label="Data" href="/data" />
|
||||
)}
|
||||
<VStack w="full" alignItems="flex-start" spacing={0} pt={8}>
|
||||
<Text
|
||||
pl={2}
|
||||
@@ -105,7 +96,7 @@ const NavSidebar = () => {
|
||||
<NavSidebarOption>
|
||||
<HStack
|
||||
w="full"
|
||||
p={4}
|
||||
p={{ base: 2, md: 4 }}
|
||||
as={ChakraLink}
|
||||
justifyContent="start"
|
||||
onClick={() => {
|
||||
@@ -141,10 +132,12 @@ export default function AppShell({
|
||||
children,
|
||||
title,
|
||||
requireAuth,
|
||||
requireBeta,
|
||||
}: {
|
||||
children: React.ReactNode;
|
||||
title?: string;
|
||||
requireAuth?: boolean;
|
||||
requireBeta?: boolean;
|
||||
}) {
|
||||
const [vh, setVh] = useState("100vh"); // Default height to prevent flicker on initial render
|
||||
|
||||
@@ -174,15 +167,21 @@ export default function AppShell({
|
||||
}
|
||||
}, [requireAuth, user, authLoading]);
|
||||
|
||||
const flags = useAppStore((s) => s.featureFlags.featureFlags);
|
||||
const flagsLoaded = useAppStore((s) => s.featureFlags.flagsLoaded);
|
||||
|
||||
return (
|
||||
<Flex h={vh} w="100vw">
|
||||
<Head>
|
||||
<title>{title ? `${title} | OpenPipe` : "OpenPipe"}</title>
|
||||
</Head>
|
||||
<NavSidebar />
|
||||
<Box h="100%" flex={1} overflowY="auto" bgColor="gray.50">
|
||||
{children}
|
||||
</Box>
|
||||
</Flex>
|
||||
<>
|
||||
<Flex h={vh} w="100vw">
|
||||
<Head>
|
||||
<title>{title ? `${title} | OpenPipe` : "OpenPipe"}</title>
|
||||
</Head>
|
||||
<NavSidebar />
|
||||
<Box h="100%" flex={1} overflowY="auto" bgColor="gray.50">
|
||||
{children}
|
||||
</Box>
|
||||
</Flex>
|
||||
{requireBeta && flagsLoaded && !flags.betaAccess && <BetaModal />}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
67
app/src/components/nav/BetaModal.tsx
Normal file
67
app/src/components/nav/BetaModal.tsx
Normal file
@@ -0,0 +1,67 @@
|
||||
import {
|
||||
Button,
|
||||
Modal,
|
||||
ModalBody,
|
||||
ModalContent,
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
ModalOverlay,
|
||||
VStack,
|
||||
Text,
|
||||
HStack,
|
||||
Icon,
|
||||
Link,
|
||||
} from "@chakra-ui/react";
|
||||
import { BsStars } from "react-icons/bs";
|
||||
import { useRouter } from "next/router";
|
||||
import { useSession } from "next-auth/react";
|
||||
|
||||
export const BetaModal = () => {
|
||||
const router = useRouter();
|
||||
const session = useSession();
|
||||
|
||||
const email = session.data?.user.email ?? "";
|
||||
|
||||
return (
|
||||
<Modal
|
||||
isOpen
|
||||
onClose={router.back}
|
||||
closeOnOverlayClick={false}
|
||||
size={{ base: "xl", md: "2xl" }}
|
||||
>
|
||||
<ModalOverlay />
|
||||
<ModalContent w={1200}>
|
||||
<ModalHeader>
|
||||
<HStack>
|
||||
<Icon as={BsStars} />
|
||||
<Text>Beta-Only Feature</Text>
|
||||
</HStack>
|
||||
</ModalHeader>
|
||||
<ModalBody maxW="unset">
|
||||
<VStack spacing={8} py={4} alignItems="flex-start">
|
||||
<Text fontSize="md">
|
||||
This feature is currently in beta. To receive early access to beta-only features, join
|
||||
the waitlist. You'll receive an email at <b>{email}</b> when you're approved.
|
||||
</Text>
|
||||
</VStack>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<HStack spacing={4}>
|
||||
<Button
|
||||
as={Link}
|
||||
textDecoration="none !important"
|
||||
colorScheme="orange"
|
||||
target="_blank"
|
||||
href={`https://ax3nafkw0jp.typeform.com/to/ZNpYqvAc#email=${email}`}
|
||||
>
|
||||
Join Waitlist
|
||||
</Button>
|
||||
<Button colorScheme="blue" onClick={router.back}>
|
||||
Done
|
||||
</Button>
|
||||
</HStack>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
@@ -14,6 +14,7 @@ import {
|
||||
Link as ChakraLink,
|
||||
Image,
|
||||
Box,
|
||||
Portal,
|
||||
} from "@chakra-ui/react";
|
||||
import { useEffect } from "react";
|
||||
import Link from "next/link";
|
||||
@@ -67,7 +68,13 @@ export default function ProjectMenu() {
|
||||
);
|
||||
|
||||
return (
|
||||
<VStack w="full" alignItems="flex-start" spacing={0} py={1}>
|
||||
<VStack
|
||||
w="full"
|
||||
alignItems="flex-start"
|
||||
spacing={0}
|
||||
py={1}
|
||||
zIndex={popover.isOpen ? "dropdown" : undefined}
|
||||
>
|
||||
<Popover
|
||||
placement="bottom"
|
||||
isOpen={popover.isOpen}
|
||||
@@ -103,64 +110,66 @@ export default function ProjectMenu() {
|
||||
</HStack>
|
||||
</NavSidebarOption>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent
|
||||
_focusVisible={{ outline: "unset" }}
|
||||
w={220}
|
||||
ml={{ base: 2, md: 0 }}
|
||||
boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);"
|
||||
fontSize="sm"
|
||||
>
|
||||
<VStack alignItems="flex-start" spacing={1} py={1}>
|
||||
<Text px={3} py={2}>
|
||||
{user?.user.email}
|
||||
</Text>
|
||||
<Divider />
|
||||
<Text alignSelf="flex-start" fontWeight="bold" px={3} pt={2}>
|
||||
Your Projects
|
||||
</Text>
|
||||
<VStack spacing={0} w="full" px={1}>
|
||||
{projects?.map((proj) => (
|
||||
<ProjectOption
|
||||
key={proj.id}
|
||||
proj={proj}
|
||||
isActive={proj.id === selectedProjectId}
|
||||
onClose={popover.onClose}
|
||||
/>
|
||||
))}
|
||||
<HStack
|
||||
as={Button}
|
||||
variant="ghost"
|
||||
colorScheme="blue"
|
||||
color="blue.400"
|
||||
fontSize="sm"
|
||||
justifyContent="flex-start"
|
||||
onClick={createProject}
|
||||
w="full"
|
||||
borderRadius={4}
|
||||
spacing={0}
|
||||
>
|
||||
<Text>Add project</Text>
|
||||
<Icon as={isLoading ? Spinner : BsPlus} boxSize={4} strokeWidth={0.5} />
|
||||
</HStack>
|
||||
</VStack>
|
||||
<Portal>
|
||||
<PopoverContent
|
||||
_focusVisible={{ outline: "unset" }}
|
||||
w={220}
|
||||
ml={{ base: 2, md: 0 }}
|
||||
boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);"
|
||||
fontSize="sm"
|
||||
>
|
||||
<VStack alignItems="flex-start" spacing={1} py={1}>
|
||||
<Text px={3} py={2}>
|
||||
{user?.user.email}
|
||||
</Text>
|
||||
<Divider />
|
||||
<Text alignSelf="flex-start" fontWeight="bold" px={3} pt={2}>
|
||||
Your Projects
|
||||
</Text>
|
||||
<VStack spacing={0} w="full" px={1}>
|
||||
{projects?.map((proj) => (
|
||||
<ProjectOption
|
||||
key={proj.id}
|
||||
proj={proj}
|
||||
isActive={proj.id === selectedProjectId}
|
||||
onClose={popover.onClose}
|
||||
/>
|
||||
))}
|
||||
<HStack
|
||||
as={Button}
|
||||
variant="ghost"
|
||||
colorScheme="blue"
|
||||
color="blue.400"
|
||||
fontSize="sm"
|
||||
justifyContent="flex-start"
|
||||
onClick={createProject}
|
||||
w="full"
|
||||
borderRadius={4}
|
||||
spacing={0}
|
||||
>
|
||||
<Text>Add project</Text>
|
||||
<Icon as={isLoading ? Spinner : BsPlus} boxSize={4} strokeWidth={0.5} />
|
||||
</HStack>
|
||||
</VStack>
|
||||
|
||||
<Divider />
|
||||
<VStack w="full" px={1}>
|
||||
<ChakraLink
|
||||
onClick={() => {
|
||||
signOut().catch(console.error);
|
||||
}}
|
||||
_hover={{ bgColor: "gray.200", textDecoration: "none" }}
|
||||
w="full"
|
||||
py={2}
|
||||
px={2}
|
||||
borderRadius={4}
|
||||
>
|
||||
<Text>Sign out</Text>
|
||||
</ChakraLink>
|
||||
<Divider />
|
||||
<VStack w="full" px={1}>
|
||||
<ChakraLink
|
||||
onClick={() => {
|
||||
signOut().catch(console.error);
|
||||
}}
|
||||
_hover={{ bgColor: "gray.200", textDecoration: "none" }}
|
||||
w="full"
|
||||
py={2}
|
||||
px={2}
|
||||
borderRadius={4}
|
||||
>
|
||||
<Text>Sign out</Text>
|
||||
</ChakraLink>
|
||||
</VStack>
|
||||
</VStack>
|
||||
</VStack>
|
||||
</PopoverContent>
|
||||
</PopoverContent>
|
||||
</Portal>
|
||||
</Popover>
|
||||
</VStack>
|
||||
);
|
||||
|
||||
@@ -23,50 +23,48 @@ export default function UserMenu({ user, ...rest }: { user: Session } & StackPro
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<Popover placement="right">
|
||||
<PopoverTrigger>
|
||||
<NavSidebarOption>
|
||||
<HStack
|
||||
// Weird values to make mobile look right; can clean up when we make the sidebar disappear on mobile
|
||||
py={2}
|
||||
px={1}
|
||||
spacing={3}
|
||||
{...rest}
|
||||
>
|
||||
{profileImage}
|
||||
<VStack spacing={0} align="start" flex={1} flexShrink={1}>
|
||||
<Text fontWeight="bold" fontSize="sm">
|
||||
{user.user.name}
|
||||
</Text>
|
||||
<Text color="gray.500" fontSize="xs">
|
||||
{/* {user.user.email} */}
|
||||
</Text>
|
||||
</VStack>
|
||||
<Icon as={BsChevronRight} boxSize={4} color="gray.500" />
|
||||
</HStack>
|
||||
</NavSidebarOption>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent _focusVisible={{ outline: "unset" }} ml={-1} minW={48} w="full">
|
||||
<VStack align="stretch" spacing={0}>
|
||||
{/* sign out */}
|
||||
<HStack
|
||||
as={Link}
|
||||
onClick={() => {
|
||||
signOut().catch(console.error);
|
||||
}}
|
||||
px={4}
|
||||
py={2}
|
||||
spacing={4}
|
||||
color="gray.500"
|
||||
fontSize="sm"
|
||||
>
|
||||
<Icon as={BsBoxArrowRight} boxSize={6} />
|
||||
<Text>Sign out</Text>
|
||||
</HStack>
|
||||
</VStack>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
</>
|
||||
<Popover placement="right">
|
||||
<PopoverTrigger>
|
||||
<NavSidebarOption>
|
||||
<HStack
|
||||
// Weird values to make mobile look right; can clean up when we make the sidebar disappear on mobile
|
||||
py={2}
|
||||
px={1}
|
||||
spacing={3}
|
||||
{...rest}
|
||||
>
|
||||
{profileImage}
|
||||
<VStack spacing={0} align="start" flex={1} flexShrink={1}>
|
||||
<Text fontWeight="bold" fontSize="sm">
|
||||
{user.user.name}
|
||||
</Text>
|
||||
<Text color="gray.500" fontSize="xs">
|
||||
{/* {user.user.email} */}
|
||||
</Text>
|
||||
</VStack>
|
||||
<Icon as={BsChevronRight} boxSize={4} color="gray.500" />
|
||||
</HStack>
|
||||
</NavSidebarOption>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent _focusVisible={{ outline: "unset" }} ml={-1} minW={48} w="full">
|
||||
<VStack align="stretch" spacing={0}>
|
||||
{/* sign out */}
|
||||
<HStack
|
||||
as={Link}
|
||||
onClick={() => {
|
||||
signOut().catch(console.error);
|
||||
}}
|
||||
px={4}
|
||||
py={2}
|
||||
spacing={4}
|
||||
color="gray.500"
|
||||
fontSize="sm"
|
||||
>
|
||||
<Icon as={BsBoxArrowRight} boxSize={6} />
|
||||
<Text>Sign out</Text>
|
||||
</HStack>
|
||||
</VStack>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ const ActionButton = ({
>
<HStack spacing={1}>
{icon && <Icon as={icon} />}
<Text>{label}</Text>
<Text display={{ base: "none", md: "flex" }}>{label}</Text>
</HStack>
</Button>
);
117  app/src/components/requestLogs/ColumnVisiblityDropdown.tsx  Normal file
@@ -0,0 +1,117 @@
import {
Icon,
Popover,
PopoverTrigger,
PopoverContent,
VStack,
HStack,
Button,
Text,
useDisclosure,
Box,
} from "@chakra-ui/react";
import { BiCheck } from "react-icons/bi";
import { BsToggles } from "react-icons/bs";
import { useMemo } from "react";

import { useIsClientRehydrated, useTagNames } from "~/utils/hooks";
import { useAppStore } from "~/state/store";
import { StaticColumnKeys } from "~/state/columnVisiblitySlice";
import ActionButton from "./ActionButton";

const ColumnVisiblityDropdown = () => {
const tagNames = useTagNames().data;

const visibleColumns = useAppStore((s) => s.columnVisibility.visibleColumns);
const toggleColumnVisibility = useAppStore((s) => s.columnVisibility.toggleColumnVisibility);
const totalColumns = Object.keys(StaticColumnKeys).length + (tagNames?.length ?? 0);

const popover = useDisclosure();

const columnVisiblityOptions = useMemo(() => {
const options: { label: string; key: string }[] = [
{
label: "Sent At",
key: StaticColumnKeys.SENT_AT,
},
{
label: "Model",
key: StaticColumnKeys.MODEL,
},
{
label: "Duration",
key: StaticColumnKeys.DURATION,
},
{
label: "Input Tokens",
key: StaticColumnKeys.INPUT_TOKENS,
},
{
label: "Output Tokens",
key: StaticColumnKeys.OUTPUT_TOKENS,
},
{
label: "Status Code",
key: StaticColumnKeys.STATUS_CODE,
},
];
for (const tagName of tagNames ?? []) {
options.push({
label: tagName,
key: tagName,
});
}
return options;
}, [tagNames]);

const isClientRehydrated = useIsClientRehydrated();
if (!isClientRehydrated) return null;

return (
<Popover
placement="bottom-start"
isOpen={popover.isOpen}
onOpen={popover.onOpen}
onClose={popover.onClose}
>
<PopoverTrigger>
<Box>
<ActionButton
label={`Columns (${visibleColumns.size}/${totalColumns})`}
icon={BsToggles}
/>
</Box>
</PopoverTrigger>
<PopoverContent boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);" minW={0} w="auto">
<VStack spacing={0} maxH={400} overflowY="auto">
{columnVisiblityOptions?.map((option, index) => (
<HStack
key={index}
as={Button}
onClick={() => toggleColumnVisibility(option.key)}
w="full"
minH={10}
variant="ghost"
justifyContent="space-between"
fontWeight="semibold"
borderRadius={0}
colorScheme="blue"
color="black"
fontSize="sm"
borderBottomWidth={1}
>
<Text mr={16}>{option.label}</Text>
<Box w={5}>
{visibleColumns.has(option.key) && (
<Icon as={BiCheck} color="blue.500" boxSize={5} />
)}
</Box>
</HStack>
))}
</VStack>
</PopoverContent>
</Popover>
);
};

export default ColumnVisiblityDropdown;
210  app/src/components/requestLogs/ExportButton.tsx  Normal file
@@ -0,0 +1,210 @@
|
||||
import { useState, useEffect } from "react";
|
||||
import {
|
||||
Modal,
|
||||
ModalOverlay,
|
||||
ModalContent,
|
||||
ModalHeader,
|
||||
ModalCloseButton,
|
||||
ModalBody,
|
||||
ModalFooter,
|
||||
HStack,
|
||||
VStack,
|
||||
Icon,
|
||||
Text,
|
||||
Button,
|
||||
Checkbox,
|
||||
NumberInput,
|
||||
NumberInputField,
|
||||
NumberInputStepper,
|
||||
NumberIncrementStepper,
|
||||
NumberDecrementStepper,
|
||||
Collapse,
|
||||
Flex,
|
||||
useDisclosure,
|
||||
type UseDisclosureReturn,
|
||||
} from "@chakra-ui/react";
|
||||
import { BiExport } from "react-icons/bi";
|
||||
|
||||
import { useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import { api } from "~/utils/api";
|
||||
import { useAppStore } from "~/state/store";
|
||||
import ActionButton from "./ActionButton";
|
||||
import InputDropdown from "../InputDropdown";
|
||||
import { FiChevronUp, FiChevronDown } from "react-icons/fi";
|
||||
import InfoCircle from "../InfoCircle";
|
||||
|
||||
const SUPPORTED_EXPORT_FORMATS = ["alpaca-finetune", "openai-fine-tune", "unformatted"];
|
||||
|
||||
const ExportButton = () => {
|
||||
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
|
||||
|
||||
const disclosure = useDisclosure();
|
||||
|
||||
return (
|
||||
<>
|
||||
<ActionButton
|
||||
onClick={disclosure.onOpen}
|
||||
label="Export"
|
||||
icon={BiExport}
|
||||
isDisabled={selectedLogIds.size === 0}
|
||||
/>
|
||||
<ExportLogsModal disclosure={disclosure} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default ExportButton;
|
||||
|
||||
const ExportLogsModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
|
||||
const selectedProjectId = useAppStore((s) => s.selectedProjectId);
|
||||
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
|
||||
const clearSelectedLogIds = useAppStore((s) => s.selectedLogs.clearSelectedLogIds);
|
||||
|
||||
const [selectedExportFormat, setSelectedExportFormat] = useState(SUPPORTED_EXPORT_FORMATS[0]);
|
||||
const [testingSplit, setTestingSplit] = useState(10);
|
||||
const [removeDuplicates, setRemoveDuplicates] = useState(true);
|
||||
const [showAdvancedOptions, setShowAdvancedOptions] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
if (disclosure.isOpen) {
|
||||
setSelectedExportFormat(SUPPORTED_EXPORT_FORMATS[0]);
|
||||
setTestingSplit(10);
|
||||
setRemoveDuplicates(true);
|
||||
}
|
||||
}, [disclosure.isOpen]);
|
||||
|
||||
const exportLogsMutation = api.loggedCalls.export.useMutation();
|
||||
|
||||
const [exportLogs, exportInProgress] = useHandledAsyncCallback(async () => {
|
||||
if (!selectedProjectId || !selectedLogIds.size || !testingSplit || !selectedExportFormat)
|
||||
return;
|
||||
const response = await exportLogsMutation.mutateAsync({
|
||||
projectId: selectedProjectId,
|
||||
selectedLogIds: Array.from(selectedLogIds),
|
||||
testingSplit,
|
||||
selectedExportFormat,
|
||||
removeDuplicates,
|
||||
});
|
||||
|
||||
const dataUrl = `data:application/pdf;base64,${response}`;
|
||||
const blob = await fetch(dataUrl).then((res) => res.blob());
|
||||
const url = URL.createObjectURL(blob);
|
||||
const a = document.createElement("a");
|
||||
|
||||
a.href = url;
|
||||
a.download = `data.zip`;
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
document.body.removeChild(a);
|
||||
|
||||
disclosure.onClose();
|
||||
clearSelectedLogIds();
|
||||
}, [
|
||||
exportLogsMutation,
|
||||
selectedProjectId,
|
||||
selectedLogIds,
|
||||
testingSplit,
|
||||
selectedExportFormat,
|
||||
removeDuplicates,
|
||||
]);
|
||||
|
||||
return (
|
||||
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
|
||||
<ModalOverlay />
|
||||
<ModalContent w={1200}>
|
||||
<ModalHeader>
|
||||
<HStack>
|
||||
<Icon as={BiExport} />
|
||||
<Text>Export Logs</Text>
|
||||
</HStack>
|
||||
</ModalHeader>
|
||||
<ModalCloseButton />
|
||||
<ModalBody maxW="unset">
|
||||
<VStack w="full" spacing={8} pt={4} alignItems="flex-start">
|
||||
<Text>
|
||||
We'll export the <b>{selectedLogIds.size}</b> logs you have selected in the format of
|
||||
your choice.
|
||||
</Text>
|
||||
<VStack alignItems="flex-start" spacing={4}>
|
||||
<Flex
|
||||
flexDir={{ base: "column", md: "row" }}
|
||||
alignItems={{ base: "flex-start", md: "center" }}
|
||||
>
|
||||
<HStack w={48} alignItems="center" spacing={1}>
|
||||
<Text fontWeight="bold">Format:</Text>
|
||||
<InfoCircle tooltipText="Format logs for for fine tuning or export them without formatting." />
|
||||
</HStack>
|
||||
<InputDropdown
|
||||
options={SUPPORTED_EXPORT_FORMATS}
|
||||
selectedOption={selectedExportFormat}
|
||||
onSelect={(option) => setSelectedExportFormat(option)}
|
||||
inputGroupProps={{ w: 48 }}
|
||||
/>
|
||||
</Flex>
|
||||
<Flex
|
||||
flexDir={{ base: "column", md: "row" }}
|
||||
alignItems={{ base: "flex-start", md: "center" }}
|
||||
>
|
||||
<HStack w={48} alignItems="center" spacing={1}>
|
||||
<Text fontWeight="bold">Testing Split:</Text>
|
||||
<InfoCircle tooltipText="The percent of your logs that will be reserved for testing and saved in another file. Logs are split randomly." />
|
||||
</HStack>
|
||||
<HStack>
|
||||
<NumberInput
|
||||
defaultValue={10}
|
||||
onChange={(_, num) => setTestingSplit(num)}
|
||||
min={1}
|
||||
max={100}
|
||||
w={48}
|
||||
>
|
||||
<NumberInputField />
|
||||
<NumberInputStepper>
|
||||
<NumberIncrementStepper />
|
||||
<NumberDecrementStepper />
|
||||
</NumberInputStepper>
|
||||
</NumberInput>
|
||||
</HStack>
|
||||
</Flex>
|
||||
</VStack>
|
||||
<VStack alignItems="flex-start" spacing={0}>
|
||||
<Button
|
||||
variant="unstyled"
|
||||
color="blue.600"
|
||||
onClick={() => setShowAdvancedOptions(!showAdvancedOptions)}
|
||||
>
|
||||
<HStack>
|
||||
<Text>Advanced Options</Text>
|
||||
<Icon as={showAdvancedOptions ? FiChevronUp : FiChevronDown} />
|
||||
</HStack>
|
||||
</Button>
|
||||
<Collapse in={showAdvancedOptions} unmountOnExit={true}>
|
||||
<VStack align="stretch" pt={4}>
|
||||
<HStack>
|
||||
<Checkbox
|
||||
colorScheme="blue"
|
||||
isChecked={removeDuplicates}
|
||||
onChange={(e) => setRemoveDuplicates(e.target.checked)}
|
||||
>
|
||||
<Text>Remove duplicates</Text>
|
||||
</Checkbox>
|
||||
<InfoCircle tooltipText="To avoid overfitting and speed up training, automatically deduplicate logs with matching input and output." />
|
||||
</HStack>
|
||||
</VStack>
|
||||
</Collapse>
|
||||
</VStack>
|
||||
</VStack>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<HStack>
|
||||
<Button colorScheme="gray" onClick={disclosure.onClose} minW={24}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button colorScheme="blue" onClick={exportLogs} isLoading={exportInProgress} minW={24}>
|
||||
Export
|
||||
</Button>
|
||||
</HStack>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
161  app/src/components/requestLogs/FineTuneButton.tsx  Normal file
@@ -0,0 +1,161 @@
|
||||
import { useState, useEffect } from "react";
|
||||
import {
|
||||
Modal,
|
||||
ModalOverlay,
|
||||
ModalContent,
|
||||
ModalHeader,
|
||||
ModalCloseButton,
|
||||
ModalBody,
|
||||
ModalFooter,
|
||||
HStack,
|
||||
VStack,
|
||||
Icon,
|
||||
Text,
|
||||
Button,
|
||||
useDisclosure,
|
||||
type UseDisclosureReturn,
|
||||
Input,
|
||||
} from "@chakra-ui/react";
|
||||
import { FaRobot } from "react-icons/fa";
|
||||
import humanId from "human-id";
|
||||
import { useRouter } from "next/router";
|
||||
|
||||
import { useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import { api } from "~/utils/api";
|
||||
import { useAppStore } from "~/state/store";
|
||||
import ActionButton from "./ActionButton";
|
||||
import InputDropdown from "../InputDropdown";
|
||||
import { FiChevronDown } from "react-icons/fi";
|
||||
|
||||
const SUPPORTED_BASE_MODELS = ["llama2-7b", "llama2-13b", "llama2-70b", "gpt-3.5-turbo"];
|
||||
|
||||
const FineTuneButton = () => {
|
||||
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
|
||||
|
||||
const disclosure = useDisclosure();
|
||||
|
||||
return (
|
||||
<>
|
||||
<ActionButton
|
||||
onClick={disclosure.onOpen}
|
||||
label="Fine Tune"
|
||||
icon={FaRobot}
|
||||
isDisabled={selectedLogIds.size === 0}
|
||||
/>
|
||||
<FineTuneModal disclosure={disclosure} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default FineTuneButton;
|
||||
|
||||
const FineTuneModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
|
||||
const selectedProjectId = useAppStore((s) => s.selectedProjectId);
|
||||
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
|
||||
const clearSelectedLogIds = useAppStore((s) => s.selectedLogs.clearSelectedLogIds);
|
||||
|
||||
const [selectedBaseModel, setSelectedBaseModel] = useState(SUPPORTED_BASE_MODELS[0]);
|
||||
const [modelSlug, setModelSlug] = useState(humanId({ separator: "-", capitalize: false }));
|
||||
|
||||
useEffect(() => {
|
||||
if (disclosure.isOpen) {
|
||||
setSelectedBaseModel(SUPPORTED_BASE_MODELS[0]);
|
||||
setModelSlug(humanId({ separator: "-", capitalize: false }));
|
||||
}
|
||||
}, [disclosure.isOpen]);
|
||||
|
||||
const utils = api.useContext();
|
||||
const router = useRouter();
|
||||
|
||||
const createFineTuneMutation = api.fineTunes.create.useMutation();
|
||||
|
||||
const [createFineTune, creationInProgress] = useHandledAsyncCallback(async () => {
|
||||
if (!selectedProjectId || !modelSlug || !selectedBaseModel || !selectedLogIds.size) return;
|
||||
await createFineTuneMutation.mutateAsync({
|
||||
projectId: selectedProjectId,
|
||||
slug: modelSlug,
|
||||
baseModel: selectedBaseModel,
|
||||
selectedLogIds: Array.from(selectedLogIds),
|
||||
});
|
||||
|
||||
await utils.fineTunes.list.invalidate();
|
||||
await router.push({ pathname: "/fine-tunes" });
|
||||
clearSelectedLogIds();
|
||||
disclosure.onClose();
|
||||
}, [createFineTuneMutation, selectedProjectId, selectedLogIds, modelSlug, selectedBaseModel]);
|
||||
|
||||
return (
|
||||
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
|
||||
<ModalOverlay />
|
||||
<ModalContent w={1200}>
|
||||
<ModalHeader>
|
||||
<HStack>
|
||||
<Icon as={FaRobot} />
|
||||
<Text>Fine Tune</Text>
|
||||
</HStack>
|
||||
</ModalHeader>
|
||||
<ModalCloseButton />
|
||||
<ModalBody maxW="unset">
|
||||
<VStack w="full" spacing={8} pt={4} alignItems="flex-start">
|
||||
<Text>
|
||||
We'll train on the <b>{selectedLogIds.size}</b> logs you've selected.
|
||||
</Text>
|
||||
<VStack>
|
||||
<HStack spacing={2} w="full">
|
||||
<Text fontWeight="bold" w={36}>
|
||||
Model ID:
|
||||
</Text>
|
||||
<Input
|
||||
value={modelSlug}
|
||||
onChange={(e) => setModelSlug(e.target.value)}
|
||||
w={48}
|
||||
placeholder="unique-id"
|
||||
onKeyDown={(e) => {
|
||||
// If the user types anything other than a-z, A-Z, or 0-9, replace it with -
|
||||
if (!/[a-zA-Z0-9]/.test(e.key)) {
|
||||
e.preventDefault();
|
||||
setModelSlug((s) => s && `${s}-`);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</HStack>
|
||||
<HStack spacing={2}>
|
||||
<Text fontWeight="bold" w={36}>
|
||||
Base model:
|
||||
</Text>
|
||||
<InputDropdown
|
||||
options={SUPPORTED_BASE_MODELS}
|
||||
selectedOption={selectedBaseModel}
|
||||
onSelect={(option) => setSelectedBaseModel(option)}
|
||||
inputGroupProps={{ w: 48 }}
|
||||
/>
|
||||
</HStack>
|
||||
</VStack>
|
||||
<Button variant="unstyled" color="blue.600">
|
||||
<HStack>
|
||||
<Text>Advanced Options</Text>
|
||||
<Icon as={FiChevronDown} />
|
||||
</HStack>
|
||||
</Button>
|
||||
</VStack>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<HStack>
|
||||
<Button colorScheme="gray" onClick={disclosure.onClose} minW={24}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
colorScheme="blue"
|
||||
onClick={createFineTune}
|
||||
isLoading={creationInProgress}
|
||||
minW={24}
|
||||
isDisabled={!modelSlug}
|
||||
>
|
||||
Start Training
|
||||
</Button>
|
||||
</HStack>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
@@ -10,7 +10,7 @@ export default function LoggedCallsTable() {
return (
<Card width="100%" overflowX="auto">
<Table>
<TableHeader showCheckbox />
<TableHeader showOptions />
<Tbody>
{loggedCalls?.calls?.map((loggedCall) => {
return (
@@ -25,7 +25,7 @@ export default function LoggedCallsTable() {
setExpandedRow(loggedCall.id);
}
}}
showCheckbox
showOptions
/>
);
})}
@@ -14,21 +14,19 @@ import {
|
||||
Text,
|
||||
Checkbox,
|
||||
} from "@chakra-ui/react";
|
||||
import dayjs from "dayjs";
|
||||
import relativeTime from "dayjs/plugin/relativeTime";
|
||||
import Link from "next/link";
|
||||
|
||||
import dayjs from "~/utils/dayjs";
|
||||
import { type RouterOutputs } from "~/utils/api";
|
||||
import { FormattedJson } from "./FormattedJson";
|
||||
import { useAppStore } from "~/state/store";
|
||||
import { useLoggedCalls, useTagNames } from "~/utils/hooks";
|
||||
import { useIsClientRehydrated, useLoggedCalls, useTagNames } from "~/utils/hooks";
|
||||
import { useMemo } from "react";
|
||||
|
||||
dayjs.extend(relativeTime);
|
||||
import { StaticColumnKeys } from "~/state/columnVisiblitySlice";
|
||||
|
||||
type LoggedCall = RouterOutputs["loggedCalls"]["list"]["calls"][0];
|
||||
|
||||
export const TableHeader = ({ showCheckbox }: { showCheckbox?: boolean }) => {
|
||||
export const TableHeader = ({ showOptions }: { showOptions?: boolean }) => {
|
||||
const matchingLogIds = useLoggedCalls().data?.matchingLogIds;
|
||||
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
|
||||
const addAll = useAppStore((s) => s.selectedLogs.addSelectedLogIds);
|
||||
@@ -38,10 +36,14 @@ export const TableHeader = ({ showCheckbox }: { showCheckbox?: boolean }) => {
|
||||
return matchingLogIds.every((id) => selectedLogIds.has(id));
|
||||
}, [selectedLogIds, matchingLogIds]);
|
||||
const tagNames = useTagNames().data;
|
||||
const visibleColumns = useAppStore((s) => s.columnVisibility.visibleColumns);
|
||||
const isClientRehydrated = useIsClientRehydrated();
|
||||
if (!isClientRehydrated) return null;
|
||||
|
||||
return (
|
||||
<Thead>
|
||||
<Tr>
|
||||
{showCheckbox && (
|
||||
{showOptions && (
|
||||
<Th pr={0}>
|
||||
<HStack minW={16}>
|
||||
<Checkbox
|
||||
@@ -57,13 +59,19 @@ export const TableHeader = ({ showCheckbox }: { showCheckbox?: boolean }) => {
|
||||
</HStack>
|
||||
</Th>
|
||||
)}
|
||||
<Th>Sent At</Th>
|
||||
<Th>Model</Th>
|
||||
{tagNames?.map((tagName) => <Th key={tagName}>{tagName}</Th>)}
|
||||
<Th isNumeric>Duration</Th>
|
||||
<Th isNumeric>Input tokens</Th>
|
||||
<Th isNumeric>Output tokens</Th>
|
||||
<Th isNumeric>Status</Th>
|
||||
{visibleColumns.has(StaticColumnKeys.SENT_AT) && <Th>Sent At</Th>}
|
||||
{visibleColumns.has(StaticColumnKeys.MODEL) && <Th>Model</Th>}
|
||||
{tagNames
|
||||
?.filter((tagName) => visibleColumns.has(tagName))
|
||||
.map((tagName) => (
|
||||
<Th key={tagName} textTransform={"none"}>
|
||||
{tagName}
|
||||
</Th>
|
||||
))}
|
||||
{visibleColumns.has(StaticColumnKeys.DURATION) && <Th isNumeric>Duration</Th>}
|
||||
{visibleColumns.has(StaticColumnKeys.INPUT_TOKENS) && <Th isNumeric>Input tokens</Th>}
|
||||
{visibleColumns.has(StaticColumnKeys.OUTPUT_TOKENS) && <Th isNumeric>Output tokens</Th>}
|
||||
{visibleColumns.has(StaticColumnKeys.STATUS_CODE) && <Th isNumeric>Status</Th>}
|
||||
</Tr>
|
||||
</Thead>
|
||||
);
|
||||
@@ -73,12 +81,12 @@ export const TableRow = ({
|
||||
loggedCall,
|
||||
isExpanded,
|
||||
onToggle,
|
||||
showCheckbox,
|
||||
showOptions,
|
||||
}: {
|
||||
loggedCall: LoggedCall;
|
||||
isExpanded: boolean;
|
||||
onToggle: () => void;
|
||||
showCheckbox?: boolean;
|
||||
showOptions?: boolean;
|
||||
}) => {
|
||||
const isError = loggedCall.modelResponse?.statusCode !== 200;
|
||||
const requestedAt = dayjs(loggedCall.requestedAt).format("MMMM D h:mm A");
|
||||
@@ -88,6 +96,14 @@ export const TableRow = ({
|
||||
const toggleChecked = useAppStore((s) => s.selectedLogs.toggleSelectedLogId);
|
||||
|
||||
const tagNames = useTagNames().data;
|
||||
const visibleColumns = useAppStore((s) => s.columnVisibility.visibleColumns);
|
||||
|
||||
const visibleTagNames = useMemo(() => {
|
||||
return tagNames?.filter((tagName) => visibleColumns.has(tagName)) ?? [];
|
||||
}, [tagNames, visibleColumns]);
|
||||
|
||||
const isClientRehydrated = useIsClientRehydrated();
|
||||
if (!isClientRehydrated) return null;
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -100,50 +116,64 @@ export const TableRow = ({
|
||||
}}
|
||||
fontSize="sm"
|
||||
>
|
||||
{showCheckbox && (
|
||||
{showOptions && (
|
||||
<Td>
|
||||
<Checkbox isChecked={isChecked} onChange={() => toggleChecked(loggedCall.id)} />
|
||||
</Td>
|
||||
)}
|
||||
<Td>
|
||||
<Tooltip label={fullTime} placement="top">
|
||||
<Box whiteSpace="nowrap" minW="120px">
|
||||
{requestedAt}
|
||||
</Box>
|
||||
</Tooltip>
|
||||
</Td>
|
||||
<Td>
|
||||
<HStack justifyContent="flex-start">
|
||||
<Text
|
||||
colorScheme="purple"
|
||||
color="purple.500"
|
||||
borderColor="purple.500"
|
||||
px={1}
|
||||
borderRadius={4}
|
||||
borderWidth={1}
|
||||
fontSize="xs"
|
||||
whiteSpace="nowrap"
|
||||
>
|
||||
{loggedCall.model}
|
||||
</Text>
|
||||
</HStack>
|
||||
</Td>
|
||||
{tagNames?.map((tagName) => <Td key={tagName}>{loggedCall.tags[tagName]}</Td>)}
|
||||
<Td isNumeric>
|
||||
{loggedCall.cacheHit ? (
|
||||
<Text color="gray.500">Cached</Text>
|
||||
) : (
|
||||
((loggedCall.modelResponse?.durationMs ?? 0) / 1000).toFixed(2) + "s"
|
||||
)}
|
||||
</Td>
|
||||
<Td isNumeric>{loggedCall.modelResponse?.inputTokens}</Td>
|
||||
<Td isNumeric>{loggedCall.modelResponse?.outputTokens}</Td>
|
||||
<Td sx={{ color: isError ? "red.500" : "green.500", fontWeight: "semibold" }} isNumeric>
|
||||
{loggedCall.modelResponse?.statusCode ?? "No response"}
|
||||
</Td>
|
||||
{visibleColumns.has(StaticColumnKeys.SENT_AT) && (
|
||||
<Td>
|
||||
<Tooltip label={fullTime} placement="top">
|
||||
<Box whiteSpace="nowrap" minW="120px">
|
||||
{requestedAt}
|
||||
</Box>
|
||||
</Tooltip>
|
||||
</Td>
|
||||
)}
|
||||
{visibleColumns.has(StaticColumnKeys.MODEL) && (
|
||||
<Td>
|
||||
<HStack justifyContent="flex-start">
|
||||
<Text
|
||||
colorScheme="purple"
|
||||
color="purple.500"
|
||||
borderColor="purple.500"
|
||||
px={1}
|
||||
borderRadius={4}
|
||||
borderWidth={1}
|
||||
fontSize="xs"
|
||||
whiteSpace="nowrap"
|
||||
>
|
||||
{loggedCall.model}
|
||||
</Text>
|
||||
</HStack>
|
||||
</Td>
|
||||
)}
|
||||
{visibleTagNames.map((tagName) => (
|
||||
<Td key={tagName}>{loggedCall.tags[tagName]}</Td>
|
||||
))}
|
||||
{visibleColumns.has(StaticColumnKeys.DURATION) && (
|
||||
<Td isNumeric>
|
||||
{loggedCall.cacheHit ? (
|
||||
<Text color="gray.500">Cached</Text>
|
||||
) : (
|
||||
((loggedCall.modelResponse?.durationMs ?? 0) / 1000).toFixed(2) + "s"
|
||||
)}
|
||||
</Td>
|
||||
)}
|
||||
{visibleColumns.has(StaticColumnKeys.INPUT_TOKENS) && (
|
||||
<Td isNumeric>{loggedCall.modelResponse?.inputTokens}</Td>
|
||||
)}
|
||||
{visibleColumns.has(StaticColumnKeys.OUTPUT_TOKENS) && (
|
||||
<Td isNumeric>{loggedCall.modelResponse?.outputTokens}</Td>
|
||||
)}
|
||||
{visibleColumns.has(StaticColumnKeys.STATUS_CODE) && (
|
||||
<Td sx={{ color: isError ? "red.500" : "green.500", fontWeight: "semibold" }} isNumeric>
|
||||
{loggedCall.modelResponse?.statusCode ?? "No response"}
|
||||
</Td>
|
||||
)}
|
||||
</Tr>
|
||||
<Tr>
|
||||
<Td colSpan={8} p={0}>
|
||||
<Td colSpan={visibleColumns.size + 1} w="full" p={0}>
|
||||
<Collapse in={isExpanded} unmountOnExit={true}>
|
||||
<VStack p={4} align="stretch">
|
||||
<HStack align="stretch">
|
||||
|
||||
@@ -26,6 +26,14 @@ export const env = createEnv({
SMTP_PORT: z.string().default("placeholder"),
SMTP_LOGIN: z.string().default("placeholder"),
SMTP_PASSWORD: z.string().default("placeholder"),
WORKER_CONCURRENCY: z
.string()
.default("10")
.transform((val) => parseInt(val)),
WORKER_MAX_POOL_SIZE: z
.string()
.default("10")
.transform((val) => parseInt(val)),
},

/**
@@ -38,8 +46,6 @@ export const env = createEnv({
NEXT_PUBLIC_SOCKET_URL: z.string().url().default("http://localhost:3318"),
NEXT_PUBLIC_HOST: z.string().url().default("http://localhost:3000"),
NEXT_PUBLIC_SENTRY_DSN: z.string().optional(),
NEXT_PUBLIC_SHOW_DATA: z.string().optional(),
NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS: z.string().optional(),
},

/**
@@ -54,7 +60,6 @@ export const env = createEnv({
NEXT_PUBLIC_POSTHOG_KEY: process.env.NEXT_PUBLIC_POSTHOG_KEY,
NEXT_PUBLIC_SOCKET_URL: process.env.NEXT_PUBLIC_SOCKET_URL,
NEXT_PUBLIC_HOST: process.env.NEXT_PUBLIC_HOST,
NEXT_PUBLIC_SHOW_DATA: process.env.NEXT_PUBLIC_SHOW_DATA,
GITHUB_CLIENT_ID: process.env.GITHUB_CLIENT_ID,
GITHUB_CLIENT_SECRET: process.env.GITHUB_CLIENT_SECRET,
REPLICATE_API_TOKEN: process.env.REPLICATE_API_TOKEN,
@@ -62,12 +67,13 @@ export const env = createEnv({
NEXT_PUBLIC_SENTRY_DSN: process.env.NEXT_PUBLIC_SENTRY_DSN,
SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
OPENPIPE_API_KEY: process.env.OPENPIPE_API_KEY,
NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS: process.env.NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS,
SENDER_EMAIL: process.env.SENDER_EMAIL,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_LOGIN: process.env.SMTP_LOGIN,
SMTP_PASSWORD: process.env.SMTP_PASSWORD,
WORKER_CONCURRENCY: process.env.WORKER_CONCURRENCY,
WORKER_MAX_POOL_SIZE: process.env.WORKER_MAX_POOL_SIZE,
},
/**
 * Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation.
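Aside (not part of the diff): the two new worker settings above follow zod's usual pattern of taking a string from `process.env`, falling back to a default, and transforming it into a number. A minimal standalone sketch of that pattern:

```ts
import { z } from "zod";

// Same .default().transform() chain as WORKER_CONCURRENCY above; parseInt radix added for clarity.
const workerConcurrency = z
  .string()
  .default("10")
  .transform((val) => parseInt(val, 10))
  .parse(process.env.WORKER_CONCURRENCY);

console.log(typeof workerConcurrency, workerConcurrency); // "number 10" when the variable is unset
```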
@@ -16,7 +16,16 @@ export async function getCompletion(
try {
if (onStream) {
const resp = await openai.chat.completions.create(
{ ...input, stream: true },
{
...input,
stream: true,
openpipe: {
tags: {
prompt_id: "getCompletion",
stream: "true",
},
},
},
{
maxRetries: 0,
},
@@ -34,7 +43,16 @@ export async function getCompletion(
}
} else {
const resp = await openai.chat.completions.create(
{ ...input, stream: false },
{
...input,
stream: false,
openpipe: {
tags: {
prompt_id: "getCompletion",
stream: "false",
},
},
},
{
maxRetries: 0,
},
@@ -7,6 +7,7 @@ import {
// templateSystemUserAssistantPrompt,
templateInstructionInputResponsePrompt,
templateAiroborosPrompt,
templateGryphePrompt,
templateVicunaPrompt,
} from "./templatePrompt";

@@ -69,6 +70,15 @@ const frontendModelProvider: FrontendModelProvider<SupportedModel, OpenpipeChatO
learnMoreUrl: "https://huggingface.co/lmsys/vicuna-13b-v1.5",
templatePrompt: templateVicunaPrompt,
},
"Gryphe/MythoMax-L2-13b": {
name: "MythoMax-L2-13b",
contextWindow: 4096,
pricePerSecond: 0.0003,
speed: "medium",
provider: "openpipe/Chat",
learnMoreUrl: "https://huggingface.co/Gryphe/MythoMax-L2-13b",
templatePrompt: templateGryphePrompt,
},
"NousResearch/Nous-Hermes-llama-2-7b": {
name: "Nous-Hermes-llama-2-7b",
contextWindow: 4096,
@@ -13,6 +13,7 @@ const modelEndpoints: Record<OpenpipeChatInput["model"], string> = {
"NousResearch/Nous-Hermes-Llama2-13b": "https://ncv8pw3u0vb8j2-8000.proxy.runpod.net/v1",
"jondurbin/airoboros-l2-13b-gpt4-2.0": "https://9nrbx7oph4btou-8000.proxy.runpod.net/v1",
"lmsys/vicuna-13b-v1.5": "https://h88hkt3ux73rb7-8000.proxy.runpod.net/v1",
"Gryphe/MythoMax-L2-13b": "https://3l5jvhnxdgky3v-8000.proxy.runpod.net/v1",
"NousResearch/Nous-Hermes-llama-2-7b": "https://ua1bpc6kv3dgge-8000.proxy.runpod.net/v1",
};
@@ -11,6 +11,7 @@ const supportedModels = [
"NousResearch/Nous-Hermes-Llama2-13b",
"jondurbin/airoboros-l2-13b-gpt4-2.0",
"lmsys/vicuna-13b-v1.5",
"Gryphe/MythoMax-L2-13b",
"NousResearch/Nous-Hermes-llama-2-7b",
] as const;
@@ -11,6 +11,7 @@
"NousResearch/Nous-Hermes-Llama2-13b",
"jondurbin/airoboros-l2-13b-gpt4-2.0",
"lmsys/vicuna-13b-v1.5",
"Gryphe/MythoMax-L2-13b",
"NousResearch/Nous-Hermes-llama-2-7b"
]
},
@@ -223,3 +223,52 @@ export const templateVicunaPrompt = (messages: OpenpipeChatInput["messages"]) =>

return prompt.trim();
};

// <System prompt/Character Card>

// ### Instruction:
// Your instruction or question here.
// For roleplay purposes, I suggest the following - Write <CHAR NAME>'s next reply in a chat between <YOUR NAME> and <CHAR NAME>. Write a single reply only.

// ### Response:
export const templateGryphePrompt = (messages: OpenpipeChatInput["messages"]) => {
const splitter = "\n\n";

const instructionTag = "### Instruction:\n";
const responseTag = "### Response:\n";

let combinedSystemMessage = "";
const conversationMessages = [];

for (const message of messages) {
if (message.role === "system") {
combinedSystemMessage += message.content;
} else if (message.role === "user") {
conversationMessages.push(instructionTag + message.content);
} else {
conversationMessages.push(responseTag + message.content);
}
}

let systemMessage = "";

if (combinedSystemMessage) {
// If there is no user message, add a user tag to the system message
if (conversationMessages.find((message) => message.startsWith(instructionTag))) {
systemMessage = `${combinedSystemMessage}\n\n`;
} else {
conversationMessages.unshift(instructionTag + combinedSystemMessage);
}
}

let prompt = `${systemMessage}${conversationMessages.join(splitter)}`;

// Ensure that the prompt ends with an assistant message
const lastInstructionIndex = prompt.lastIndexOf(instructionTag);
const lastAssistantIndex = prompt.lastIndexOf(responseTag);
if (lastInstructionIndex > lastAssistantIndex) {
prompt += splitter + responseTag;
}

return prompt;
};
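Aside (not part of the diff): to make the new template concrete, this is roughly what `templateGryphePrompt` renders for a short conversation. The messages are invented for illustration:

```ts
// Assumes the OpenpipeChatInput message shape used throughout this diff.
const rendered = templateGryphePrompt([
  { role: "system", content: "You are a pirate." },
  { role: "user", content: "Say hello." },
]);

// The system message is prepended as plain text, the user turn becomes an
// "### Instruction:" block, and a trailing "### Response:" block is appended
// so the model answers next:
// "You are a pirate.\n\n### Instruction:\nSay hello.\n\n### Response:\n"
console.log(rendered);
```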
@@ -33,7 +33,7 @@ export default function Dashboard() {
);

return (
<AppShell title="Dashboard" requireAuth>
<AppShell title="Dashboard" requireAuth requireBeta>
<VStack px={8} py={8} alignItems="flex-start" spacing={4}>
<Text fontSize="2xl" fontWeight="bold">
Dashboard
@@ -1,97 +0,0 @@
|
||||
import {
|
||||
Box,
|
||||
Breadcrumb,
|
||||
BreadcrumbItem,
|
||||
Center,
|
||||
Flex,
|
||||
Icon,
|
||||
Input,
|
||||
VStack,
|
||||
} from "@chakra-ui/react";
|
||||
import Link from "next/link";
|
||||
|
||||
import { useRouter } from "next/router";
|
||||
import { useState, useEffect } from "react";
|
||||
import { RiDatabase2Line } from "react-icons/ri";
|
||||
import AppShell from "~/components/nav/AppShell";
|
||||
import { api } from "~/utils/api";
|
||||
import { useDataset, useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import DatasetEntriesTable from "~/components/datasets/DatasetEntriesTable";
|
||||
import { DatasetHeaderButtons } from "~/components/datasets/DatasetHeaderButtons/DatasetHeaderButtons";
|
||||
import PageHeaderContainer from "~/components/nav/PageHeaderContainer";
|
||||
import ProjectBreadcrumbContents from "~/components/nav/ProjectBreadcrumbContents";
|
||||
|
||||
export default function Dataset() {
|
||||
const router = useRouter();
|
||||
const utils = api.useContext();
|
||||
|
||||
const dataset = useDataset();
|
||||
const datasetId = router.query.id as string;
|
||||
|
||||
const [name, setName] = useState(dataset.data?.name || "");
|
||||
useEffect(() => {
|
||||
setName(dataset.data?.name || "");
|
||||
}, [dataset.data?.name]);
|
||||
|
||||
const updateMutation = api.datasets.update.useMutation();
|
||||
const [onSaveName] = useHandledAsyncCallback(async () => {
|
||||
if (name && name !== dataset.data?.name && dataset.data?.id) {
|
||||
await updateMutation.mutateAsync({
|
||||
id: dataset.data.id,
|
||||
updates: { name: name },
|
||||
});
|
||||
await Promise.all([utils.datasets.list.invalidate(), utils.datasets.get.invalidate()]);
|
||||
}
|
||||
}, [updateMutation, dataset.data?.id, dataset.data?.name, name]);
|
||||
|
||||
if (!dataset.isLoading && !dataset.data) {
|
||||
return (
|
||||
<AppShell title="Dataset not found">
|
||||
<Center h="100%">
|
||||
<div>Dataset not found 😕</div>
|
||||
</Center>
|
||||
</AppShell>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<AppShell title={dataset.data?.name}>
|
||||
<VStack h="full">
|
||||
<PageHeaderContainer>
|
||||
<Breadcrumb>
|
||||
<BreadcrumbItem>
|
||||
<ProjectBreadcrumbContents projectName={dataset.data?.project?.name} />
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbItem>
|
||||
<Link href="/data">
|
||||
<Flex alignItems="center" _hover={{ textDecoration: "underline" }}>
|
||||
<Icon as={RiDatabase2Line} boxSize={4} mr={2} /> Datasets
|
||||
</Flex>
|
||||
</Link>
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbItem isCurrentPage>
|
||||
<Input
|
||||
size="sm"
|
||||
value={name}
|
||||
onChange={(e) => setName(e.target.value)}
|
||||
onBlur={onSaveName}
|
||||
borderWidth={1}
|
||||
borderColor="transparent"
|
||||
fontSize={16}
|
||||
px={0}
|
||||
minW={{ base: 100, lg: 300 }}
|
||||
flex={1}
|
||||
_hover={{ borderColor: "gray.300" }}
|
||||
_focus={{ borderColor: "blue.500", outline: "none" }}
|
||||
/>
|
||||
</BreadcrumbItem>
|
||||
</Breadcrumb>
|
||||
<DatasetHeaderButtons />
|
||||
</PageHeaderContainer>
|
||||
<Box w="full" overflowX="auto" flex={1} px={8} pt={8} pb={16}>
|
||||
{datasetId && <DatasetEntriesTable />}
|
||||
</Box>
|
||||
</VStack>
|
||||
</AppShell>
|
||||
);
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
import { SimpleGrid, Icon, Breadcrumb, BreadcrumbItem, Flex } from "@chakra-ui/react";
|
||||
import AppShell from "~/components/nav/AppShell";
|
||||
import { RiDatabase2Line } from "react-icons/ri";
|
||||
import {
|
||||
DatasetCard,
|
||||
DatasetCardSkeleton,
|
||||
NewDatasetCard,
|
||||
} from "~/components/datasets/DatasetCard";
|
||||
import PageHeaderContainer from "~/components/nav/PageHeaderContainer";
|
||||
import ProjectBreadcrumbContents from "~/components/nav/ProjectBreadcrumbContents";
|
||||
import { useDatasets } from "~/utils/hooks";
|
||||
|
||||
export default function DatasetsPage() {
|
||||
const datasets = useDatasets();
|
||||
|
||||
return (
|
||||
<AppShell title="Data" requireAuth>
|
||||
<PageHeaderContainer>
|
||||
<Breadcrumb>
|
||||
<BreadcrumbItem>
|
||||
<ProjectBreadcrumbContents />
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbItem minH={8}>
|
||||
<Flex alignItems="center">
|
||||
<Icon as={RiDatabase2Line} boxSize={4} mr={2} /> Datasets
|
||||
</Flex>
|
||||
</BreadcrumbItem>
|
||||
</Breadcrumb>
|
||||
</PageHeaderContainer>
|
||||
<SimpleGrid w="full" columns={{ base: 1, md: 2, lg: 3, xl: 4 }} spacing={8} py={4} px={8}>
|
||||
<NewDatasetCard />
|
||||
{datasets.data && !datasets.isLoading ? (
|
||||
datasets?.data?.map((dataset) => (
|
||||
<DatasetCard
|
||||
key={dataset.id}
|
||||
dataset={{ ...dataset, numEntries: dataset._count.datasetEntries }}
|
||||
/>
|
||||
))
|
||||
) : (
|
||||
<>
|
||||
<DatasetCardSkeleton />
|
||||
<DatasetCardSkeleton />
|
||||
<DatasetCardSkeleton />
|
||||
</>
|
||||
)}
|
||||
</SimpleGrid>
|
||||
</AppShell>
|
||||
);
|
||||
}
|
||||
@@ -33,9 +33,9 @@ export default function Experiment() {

const experiment = useExperiment();
const experimentStats = api.experiments.stats.useQuery(
{ id: router.query.id as string },
{ id: experiment.data?.id as string },
{
enabled: !!router.query.id,
enabled: !!experiment.data?.id,
},
);
const stats = experimentStats.data;
@@ -124,8 +124,8 @@ export default function Experiment() {
<ExperimentHeaderButtons />
</PageHeaderContainer>
<ExperimentSettingsDrawer />
<Box w="100%" overflowX="auto" flex={1}>
<OutputsTable experimentId={router.query.id as string | undefined} />
<Box w="100%" overflowX="auto" flex={1} id="output-container">
<OutputsTable experimentId={experiment.data?.id} />
</Box>
</VStack>
</AppShell>
18  app/src/pages/fine-tunes/index.tsx  Normal file
@@ -0,0 +1,18 @@
import { Text, VStack, Divider } from "@chakra-ui/react";
import FineTunesTable from "~/components/fineTunes/FineTunesTable";

import AppShell from "~/components/nav/AppShell";

export default function FineTunes() {
return (
<AppShell title="Fine Tunes" requireAuth requireBeta>
<VStack px={8} py={8} alignItems="flex-start" spacing={4} w="full">
<Text fontSize="2xl" fontWeight="bold">
Fine Tunes
</Text>
<Divider />
<FineTunesTable />
</VStack>
</AppShell>
);
}
@@ -1,5 +1,5 @@
import { useState } from "react";
import { Text, VStack, Divider, HStack } from "@chakra-ui/react";
import { Text, VStack, Divider, HStack, Box } from "@chakra-ui/react";

import AppShell from "~/components/nav/AppShell";
import LoggedCallTable from "~/components/requestLogs/LoggedCallsTable";
@@ -9,6 +9,9 @@ import { useAppStore } from "~/state/store";
import { RiFlaskLine } from "react-icons/ri";
import { FiFilter } from "react-icons/fi";
import LogFilters from "~/components/requestLogs/LogFilters/LogFilters";
import ColumnVisiblityDropdown from "~/components/requestLogs/ColumnVisiblityDropdown";
import FineTuneButton from "~/components/requestLogs/FineTuneButton";
import ExportButton from "~/components/requestLogs/ExportButton";

export default function LoggedCalls() {
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
@@ -16,33 +19,38 @@ export default function LoggedCalls() {
const [filtersShown, setFiltersShown] = useState(true);

return (
<AppShell title="Request Logs" requireAuth>
<VStack px={8} py={8} alignItems="flex-start" spacing={4} w="full">
<Text fontSize="2xl" fontWeight="bold">
Request Logs
</Text>
<Divider />
<HStack w="full" justifyContent="flex-end">
<ActionButton
onClick={() => {
setFiltersShown(!filtersShown);
}}
label={filtersShown ? "Hide Filters" : "Show Filters"}
icon={FiFilter}
/>
<ActionButton
onClick={() => {
console.log("experimenting with these ids", selectedLogIds);
}}
label="Experiment"
icon={RiFlaskLine}
isDisabled={selectedLogIds.size === 0}
/>
</HStack>
{filtersShown && <LogFilters />}
<LoggedCallTable />
<LoggedCallsPaginator />
</VStack>
<AppShell title="Request Logs" requireAuth requireBeta>
<Box h="100vh" overflowY="scroll">
<VStack px={8} py={8} alignItems="flex-start" spacing={4} w="full">
<Text fontSize="2xl" fontWeight="bold">
Request Logs
</Text>
<Divider />
<HStack w="full" justifyContent="flex-end">
<FineTuneButton />
<ActionButton
onClick={() => {
console.log("experimenting with these ids", selectedLogIds);
}}
label="Experiment"
icon={RiFlaskLine}
isDisabled={selectedLogIds.size === 0}
/>
<ExportButton />
<ColumnVisiblityDropdown />
<ActionButton
onClick={() => {
setFiltersShown(!filtersShown);
}}
label={filtersShown ? "Hide Filters" : "Show Filters"}
icon={FiFilter}
/>
</HStack>
{filtersShown && <LogFilters />}
<LoggedCallTable />
<LoggedCallsPaginator />
</VStack>
</Box>
</AppShell>
);
}
@@ -1,108 +0,0 @@
|
||||
import { type ChatCompletion } from "openai/resources/chat";
|
||||
import { openai } from "../../utils/openai";
|
||||
import { isAxiosError } from "./utils";
|
||||
import { type APIResponse } from "openai/core";
|
||||
import { sleep } from "~/server/utils/sleep";
|
||||
|
||||
const MAX_AUTO_RETRIES = 50;
|
||||
const MIN_DELAY = 500; // milliseconds
|
||||
const MAX_DELAY = 15000; // milliseconds
|
||||
|
||||
function calculateDelay(numPreviousTries: number): number {
|
||||
const baseDelay = Math.min(MAX_DELAY, MIN_DELAY * Math.pow(2, numPreviousTries));
|
||||
const jitter = Math.random() * baseDelay;
|
||||
return baseDelay + jitter;
|
||||
}
|
||||
|
||||
const getCompletionWithBackoff = async (
|
||||
getCompletion: () => Promise<APIResponse<ChatCompletion>>,
|
||||
) => {
|
||||
let completion;
|
||||
let tries = 0;
|
||||
while (tries < MAX_AUTO_RETRIES) {
|
||||
try {
|
||||
completion = await getCompletion();
|
||||
break;
|
||||
} catch (e) {
|
||||
if (isAxiosError(e)) {
|
||||
console.error(e?.response?.data?.error?.message);
|
||||
} else {
|
||||
await sleep(calculateDelay(tries));
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
tries++;
|
||||
}
|
||||
return completion;
|
||||
};
|
||||
// TODO: Add seeds to ensure batches don't contain duplicate data
|
||||
const MAX_BATCH_SIZE = 5;
|
||||
|
||||
export const autogenerateDatasetEntries = async (
|
||||
numToGenerate: number,
|
||||
inputDescription: string,
|
||||
outputDescription: string,
|
||||
): Promise<{ input: string; output: string }[]> => {
|
||||
const batchSizes = Array.from({ length: Math.ceil(numToGenerate / MAX_BATCH_SIZE) }, (_, i) =>
|
||||
i === Math.ceil(numToGenerate / MAX_BATCH_SIZE) - 1 && numToGenerate % MAX_BATCH_SIZE
|
||||
? numToGenerate % MAX_BATCH_SIZE
|
||||
: MAX_BATCH_SIZE,
|
||||
);
|
||||
|
||||
const getCompletion = (batchSize: number) =>
|
||||
openai.chat.completions.create({
|
||||
model: "gpt-4",
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: `The user needs ${batchSize} rows of data, each with an input and an output.\n---\n The input should follow these requirements: ${inputDescription}\n---\n The output should follow these requirements: ${outputDescription}`,
|
||||
},
|
||||
],
|
||||
functions: [
|
||||
{
|
||||
name: "add_list_of_data",
|
||||
description: "Add a list of data to the database",
|
||||
parameters: {
|
||||
type: "object",
|
||||
properties: {
|
||||
rows: {
|
||||
type: "array",
|
||||
description: "The rows of data that match the description",
|
||||
items: {
|
||||
type: "object",
|
||||
properties: {
|
||||
input: {
|
||||
type: "string",
|
||||
description: "The input for this row",
|
||||
},
|
||||
output: {
|
||||
type: "string",
|
||||
description: "The output for this row",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
|
||||
function_call: { name: "add_list_of_data" },
|
||||
temperature: 0.5,
|
||||
});
|
||||
|
||||
const completionCallbacks = batchSizes.map((batchSize) =>
|
||||
getCompletionWithBackoff(() => getCompletion(batchSize)),
|
||||
);
|
||||
|
||||
const completions = await Promise.all(completionCallbacks);
|
||||
|
||||
const rows = completions.flatMap((completion) => {
|
||||
const parsed = JSON.parse(
|
||||
completion?.choices[0]?.message?.function_call?.arguments ?? "{rows: []}",
|
||||
) as { rows: { input: string; output: string }[] };
|
||||
return parsed.rows;
|
||||
});
|
||||
|
||||
return rows;
|
||||
};
|
||||
@@ -98,6 +98,11 @@ export const autogenerateScenarioValues = async (

function_call: { name: "add_scenario" },
temperature: 0.5,
openpipe: {
tags: {
prompt_id: "autogenerateScenarioValues",
},
},
});

const parsed = JSON.parse(
13  app/src/server/api/external/v1Api.router.ts  vendored
@@ -66,7 +66,7 @@ export const v1ApiRouter = createOpenApiRouter({

if (!existingResponse) return { respPayload: null };

await prisma.loggedCall.create({
const newCall = await prisma.loggedCall.create({
data: {
projectId: ctx.key.projectId,
requestedAt: new Date(input.requestedAt),
@@ -75,11 +75,7 @@
},
});

await createTags(
existingResponse.originalLoggedCall.projectId,
existingResponse.originalLoggedCallId,
input.tags,
);
await createTags(newCall.projectId, newCall.id, input.tags);
return {
respPayload: existingResponse.respPayload,
};
@@ -111,7 +107,7 @@
.default({}),
}),
)
.output(z.object({ status: z.literal("ok") }))
.output(z.object({ status: z.union([z.literal("ok"), z.literal("error")]) }))
.mutation(async ({ input, ctx }) => {
const reqPayload = await reqValidator.spa(input.reqPayload);
const respPayload = await respValidator.spa(input.respPayload);
@@ -212,6 +208,7 @@
createdAt: true,
cacheHit: true,
tags: true,
id: true,
modelResponse: {
select: {
id: true,
@@ -237,7 +234,7 @@ async function createTags(projectId: string, loggedCallId: string, tags: Record<
const tagsToCreate = Object.entries(tags).map(([name, value]) => ({
projectId,
loggedCallId,
name: name.replaceAll(/[^a-zA-Z0-9_$]/g, "_"),
name: name.replaceAll(/[^a-zA-Z0-9_$.]/g, "_"),
value,
}));
await prisma.loggedCallTag.createMany({
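Aside (not part of the diff): the only behavioral change in `createTags` is the sanitization regex, which now also preserves dots in tag names. The example tag names below are invented:

```ts
const before = (name: string) => name.replaceAll(/[^a-zA-Z0-9_$]/g, "_");
const after = (name: string) => name.replaceAll(/[^a-zA-Z0-9_$.]/g, "_");

before("prompt.version"); // "prompt_version" (dots used to be replaced)
after("prompt.version");  // "prompt.version" (dots are now kept)
after("user id!");        // "user_id_" (other characters are still replaced)
```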
@@ -6,11 +6,10 @@ import { scenarioVariantCellsRouter } from "./routers/scenarioVariantCells.route
import { scenarioVarsRouter } from "./routers/scenarioVariables.router";
import { evaluationsRouter } from "./routers/evaluations.router";
import { worldChampsRouter } from "./routers/worldChamps.router";
import { datasetsRouter } from "./routers/datasets.router";
import { datasetEntries } from "./routers/datasetEntries.router";
import { projectsRouter } from "./routers/projects.router";
import { dashboardRouter } from "./routers/dashboard.router";
import { loggedCallsRouter } from "./routers/loggedCalls.router";
import { fineTunesRouter } from "./routers/fineTunes.router";
import { usersRouter } from "./routers/users.router";
import { adminJobsRouter } from "./routers/adminJobs.router";

@@ -27,11 +26,10 @@ export const appRouter = createTRPCRouter({
scenarioVars: scenarioVarsRouter,
evaluations: evaluationsRouter,
worldChamps: worldChampsRouter,
datasets: datasetsRouter,
datasetEntries: datasetEntries,
projects: projectsRouter,
dashboard: dashboardRouter,
loggedCalls: loggedCallsRouter,
fineTunes: fineTunesRouter,
users: usersRouter,
adminJobs: adminJobsRouter,
});
@@ -1,145 +0,0 @@
|
||||
import { z } from "zod";
|
||||
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
|
||||
import { prisma } from "~/server/db";
|
||||
import { requireCanModifyDataset, requireCanViewDataset } from "~/utils/accessControl";
|
||||
import { autogenerateDatasetEntries } from "../autogenerate/autogenerateDatasetEntries";
|
||||
|
||||
export const datasetEntries = createTRPCRouter({
|
||||
list: protectedProcedure
|
||||
.input(z.object({ datasetId: z.string(), page: z.number(), pageSize: z.number() }))
|
||||
.query(async ({ input, ctx }) => {
|
||||
await requireCanViewDataset(input.datasetId, ctx);
|
||||
|
||||
const { datasetId, page, pageSize } = input;
|
||||
|
||||
const entries = await prisma.datasetEntry.findMany({
|
||||
where: {
|
||||
datasetId,
|
||||
},
|
||||
orderBy: { createdAt: "desc" },
|
||||
skip: (page - 1) * pageSize,
|
||||
take: pageSize,
|
||||
});
|
||||
|
||||
const count = await prisma.datasetEntry.count({
|
||||
where: {
|
||||
datasetId,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
entries,
|
||||
count,
|
||||
};
|
||||
}),
|
||||
createOne: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
datasetId: z.string(),
|
||||
input: z.string(),
|
||||
output: z.string().optional(),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
await requireCanModifyDataset(input.datasetId, ctx);
|
||||
|
||||
return await prisma.datasetEntry.create({
|
||||
data: {
|
||||
datasetId: input.datasetId,
|
||||
input: input.input,
|
||||
output: input.output,
|
||||
},
|
||||
});
|
||||
}),
|
||||
|
||||
autogenerateEntries: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
datasetId: z.string(),
|
||||
numToGenerate: z.number(),
|
||||
inputDescription: z.string(),
|
||||
outputDescription: z.string(),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
await requireCanModifyDataset(input.datasetId, ctx);
|
||||
|
||||
const dataset = await prisma.dataset.findUnique({
|
||||
where: {
|
||||
id: input.datasetId,
|
||||
},
|
||||
});
|
||||
|
||||
if (!dataset) {
|
||||
throw new Error(`Dataset with id ${input.datasetId} does not exist`);
|
||||
}
|
||||
|
||||
const entries = await autogenerateDatasetEntries(
|
||||
input.numToGenerate,
|
||||
input.inputDescription,
|
||||
input.outputDescription,
|
||||
);
|
||||
|
||||
const createdEntries = await prisma.datasetEntry.createMany({
|
||||
data: entries.map((entry) => ({
|
||||
datasetId: input.datasetId,
|
||||
input: entry.input,
|
||||
output: entry.output,
|
||||
})),
|
||||
});
|
||||
|
||||
return createdEntries;
|
||||
}),
|
||||
|
||||
delete: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
const datasetId = (
|
||||
await prisma.datasetEntry.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
})
|
||||
).datasetId;
|
||||
|
||||
await requireCanModifyDataset(datasetId, ctx);
|
||||
|
||||
return await prisma.datasetEntry.delete({
|
||||
where: {
|
||||
id: input.id,
|
||||
},
|
||||
});
|
||||
}),
|
||||
|
||||
update: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
updates: z.object({
|
||||
input: z.string(),
|
||||
output: z.string().optional(),
|
||||
}),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
const existing = await prisma.datasetEntry.findUnique({
|
||||
where: {
|
||||
id: input.id,
|
||||
},
|
||||
});
|
||||
|
||||
if (!existing) {
|
||||
throw new Error(`dataEntry with id ${input.id} does not exist`);
|
||||
}
|
||||
|
||||
await requireCanModifyDataset(existing.datasetId, ctx);
|
||||
|
||||
return await prisma.datasetEntry.update({
|
||||
where: {
|
||||
id: input.id,
|
||||
},
|
||||
data: {
|
||||
input: input.updates.input,
|
||||
output: input.updates.output,
|
||||
},
|
||||
});
|
||||
}),
|
||||
});
|
||||
@@ -1,88 +0,0 @@
|
||||
import { z } from "zod";
|
||||
import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
|
||||
import { prisma } from "~/server/db";
|
||||
import {
|
||||
requireCanModifyDataset,
|
||||
requireCanModifyProject,
|
||||
requireCanViewDataset,
|
||||
requireCanViewProject,
|
||||
} from "~/utils/accessControl";
|
||||
|
||||
export const datasetsRouter = createTRPCRouter({
|
||||
list: protectedProcedure
|
||||
.input(z.object({ projectId: z.string() }))
|
||||
.query(async ({ input, ctx }) => {
|
||||
await requireCanViewProject(input.projectId, ctx);
|
||||
|
||||
const datasets = await prisma.dataset.findMany({
|
||||
where: {
|
||||
projectId: input.projectId,
|
||||
},
|
||||
orderBy: {
|
||||
createdAt: "desc",
|
||||
},
|
||||
include: {
|
||||
_count: {
|
||||
select: { datasetEntries: true },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return datasets;
|
||||
}),
|
||||
|
||||
get: publicProcedure.input(z.object({ id: z.string() })).query(async ({ input, ctx }) => {
|
||||
await requireCanViewDataset(input.id, ctx);
|
||||
return await prisma.dataset.findFirstOrThrow({
|
||||
where: { id: input.id },
|
||||
include: {
|
||||
project: true,
|
||||
},
|
||||
});
|
||||
}),
|
||||
|
||||
create: protectedProcedure
|
||||
.input(z.object({ projectId: z.string() }))
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
await requireCanModifyProject(input.projectId, ctx);
|
||||
|
||||
const numDatasets = await prisma.dataset.count({
|
||||
where: {
|
||||
projectId: input.projectId,
|
||||
},
|
||||
});
|
||||
|
||||
return await prisma.dataset.create({
|
||||
data: {
|
||||
name: `Dataset ${numDatasets + 1}`,
|
||||
projectId: input.projectId,
|
||||
},
|
||||
});
|
||||
}),
|
||||
|
||||
update: protectedProcedure
|
||||
.input(z.object({ id: z.string(), updates: z.object({ name: z.string() }) }))
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
await requireCanModifyDataset(input.id, ctx);
|
||||
return await prisma.dataset.update({
|
||||
where: {
|
||||
id: input.id,
|
||||
},
|
||||
data: {
|
||||
name: input.updates.name,
|
||||
},
|
||||
});
|
||||
}),
|
||||
|
||||
delete: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
await requireCanModifyDataset(input.id, ctx);
|
||||
|
||||
await prisma.dataset.delete({
|
||||
where: {
|
||||
id: input.id,
|
||||
},
|
||||
});
|
||||
}),
|
||||
});
|
||||
@@ -85,15 +85,16 @@ export const experimentsRouter = createTRPCRouter({
|
||||
return experimentsWithCounts;
|
||||
}),
|
||||
|
||||
get: publicProcedure.input(z.object({ id: z.string() })).query(async ({ input, ctx }) => {
|
||||
await requireCanViewExperiment(input.id, ctx);
|
||||
get: publicProcedure.input(z.object({ slug: z.string() })).query(async ({ input, ctx }) => {
|
||||
const experiment = await prisma.experiment.findFirstOrThrow({
|
||||
where: { id: input.id },
|
||||
where: { slug: input.slug },
|
||||
include: {
|
||||
project: true,
|
||||
},
|
||||
});
|
||||
|
||||
await requireCanViewExperiment(experiment.id, ctx);
|
||||
|
||||
const canModify = ctx.session?.user.id
|
||||
? await canModifyExperiment(experiment.id, ctx.session?.user.id)
|
||||
: false;
|
||||
@@ -177,6 +178,7 @@ export const experimentsRouter = createTRPCRouter({
|
||||
existingToNewVariantIds.set(variant.id, newVariantId);
|
||||
variantsToCreate.push({
|
||||
...variant,
|
||||
uiId: uuidv4(),
|
||||
id: newVariantId,
|
||||
experimentId: newExperimentId,
|
||||
});
|
||||
@@ -190,6 +192,7 @@ export const experimentsRouter = createTRPCRouter({
|
||||
scenariosToCreate.push({
|
||||
...scenario,
|
||||
id: newScenarioId,
|
||||
uiId: uuidv4(),
|
||||
experimentId: newExperimentId,
|
||||
variableValues: scenario.variableValues as Prisma.InputJsonValue,
|
||||
});
|
||||
@@ -290,7 +293,10 @@ export const experimentsRouter = createTRPCRouter({
|
||||
}),
|
||||
]);
|
||||
|
||||
return newExperimentId;
|
||||
const newExperiment = await prisma.experiment.findUniqueOrThrow({
|
||||
where: { id: newExperimentId },
|
||||
});
|
||||
return newExperiment;
|
||||
}),
|
||||
|
||||
create: protectedProcedure
|
||||
|
||||
app/src/server/api/routers/fineTunes.router.ts (new file, 113 lines)
@@ -0,0 +1,113 @@
|
||||
import { z } from "zod";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { type Prisma } from "@prisma/client";
|
||||
|
||||
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
|
||||
import { prisma } from "~/server/db";
|
||||
import { requireCanViewProject, requireCanModifyProject } from "~/utils/accessControl";
|
||||
import { error, success } from "~/utils/errorHandling/standardResponses";
|
||||
|
||||
export const fineTunesRouter = createTRPCRouter({
|
||||
list: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
projectId: z.string(),
|
||||
page: z.number(),
|
||||
pageSize: z.number(),
|
||||
}),
|
||||
)
|
||||
.query(async ({ input, ctx }) => {
|
||||
const { projectId, page, pageSize } = input;
|
||||
|
||||
await requireCanViewProject(projectId, ctx);
|
||||
|
||||
const fineTunes = await prisma.fineTune.findMany({
|
||||
where: {
|
||||
projectId,
|
||||
},
|
||||
include: {
|
||||
dataset: {
|
||||
include: {
|
||||
_count: {
|
||||
select: {
|
||||
datasetEntries: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
orderBy: { createdAt: "asc" },
|
||||
skip: (page - 1) * pageSize,
|
||||
take: pageSize,
|
||||
});
|
||||
|
||||
const count = await prisma.fineTune.count({
|
||||
where: {
|
||||
projectId,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
fineTunes,
|
||||
count,
|
||||
};
|
||||
}),
|
||||
create: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
projectId: z.string(),
|
||||
selectedLogIds: z.array(z.string()),
|
||||
slug: z.string(),
|
||||
baseModel: z.string(),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
await requireCanModifyProject(input.projectId, ctx);
|
||||
|
||||
const existingFineTune = await prisma.fineTune.findFirst({
|
||||
where: {
|
||||
slug: input.slug,
|
||||
},
|
||||
});
|
||||
|
||||
if (existingFineTune) {
|
||||
return error("A fine tune with that slug already exists");
|
||||
}
|
||||
|
||||
const newDatasetId = uuidv4();
|
||||
|
||||
const datasetEntriesToCreate: Prisma.DatasetEntryCreateManyDatasetInput[] =
|
||||
input.selectedLogIds.map((loggedCallId) => ({
|
||||
loggedCallId,
|
||||
}));
|
||||
|
||||
await prisma.$transaction([
|
||||
prisma.dataset.create({
|
||||
data: {
|
||||
id: newDatasetId,
|
||||
name: input.slug,
|
||||
project: {
|
||||
connect: {
|
||||
id: input.projectId,
|
||||
},
|
||||
},
|
||||
datasetEntries: {
|
||||
createMany: {
|
||||
data: datasetEntriesToCreate,
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
prisma.fineTune.create({
|
||||
data: {
|
||||
projectId: input.projectId,
|
||||
slug: input.slug,
|
||||
baseModel: input.baseModel,
|
||||
datasetId: newDatasetId,
|
||||
},
|
||||
}),
|
||||
]);
|
||||
|
||||
return success();
|
||||
}),
|
||||
});
|
||||
@@ -1,11 +1,16 @@
|
||||
import { z } from "zod";
|
||||
import { type Expression, type SqlBool, sql, type RawBuilder } from "kysely";
|
||||
import { jsonArrayFrom } from "kysely/helpers/postgres";
|
||||
import archiver from "archiver";
|
||||
import { WritableStreamBuffer } from "stream-buffers";
|
||||
import { type JsonValue } from "type-fest";
|
||||
import { shuffle } from "lodash-es";
|
||||
|
||||
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
|
||||
import { kysely, prisma } from "~/server/db";
|
||||
import { comparators, defaultFilterableFields } from "~/state/logFiltersSlice";
|
||||
import { requireCanViewProject } from "~/utils/accessControl";
|
||||
import hashObject from "~/server/utils/hashObject";
|
||||
|
||||
// create comparator type based off of comparators
|
||||
const comparatorToSqlExpression = (comparator: (typeof comparators)[number], value: string) => {
|
||||
@@ -180,4 +185,102 @@ export const loggedCallsRouter = createTRPCRouter({
|
||||
|
||||
return tags.map((tag) => tag.name);
|
||||
}),
|
||||
export: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
projectId: z.string(),
|
||||
selectedLogIds: z.string().array(),
|
||||
testingSplit: z.number(),
|
||||
selectedExportFormat: z.string(),
|
||||
removeDuplicates: z.boolean(),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
await requireCanViewProject(input.projectId, ctx);
|
||||
|
||||
// Fetch the real data using Prisma
|
||||
const loggedCallsFromDb = await ctx.prisma.loggedCallModelResponse.findMany({
|
||||
where: {
|
||||
originalLoggedCall: {
|
||||
projectId: input.projectId,
|
||||
id: { in: input.selectedLogIds },
|
||||
},
|
||||
statusCode: 200,
|
||||
},
|
||||
});
|
||||
|
||||
// Convert the database data into the desired format
|
||||
let formattedLoggedCalls: { instruction: JsonValue[]; output: JsonValue }[] =
|
||||
loggedCallsFromDb.map((call) => ({
|
||||
instruction: (call.reqPayload as unknown as Record<string, unknown>)
|
||||
.messages as JsonValue[],
|
||||
output: (call.respPayload as unknown as { choices: { message: unknown }[] }).choices[0]
|
||||
?.message as JsonValue,
|
||||
}));
|
||||
|
||||
if (input.removeDuplicates) {
|
||||
const deduplicatedLoggedCalls = [];
|
||||
const loggedCallHashSet = new Set<string>();
|
||||
for (const loggedCall of formattedLoggedCalls) {
|
||||
const loggedCallHash = hashObject(loggedCall);
|
||||
if (!loggedCallHashSet.has(loggedCallHash)) {
|
||||
loggedCallHashSet.add(loggedCallHash);
|
||||
deduplicatedLoggedCalls.push(loggedCall);
|
||||
}
|
||||
}
|
||||
formattedLoggedCalls = deduplicatedLoggedCalls;
|
||||
}
|
||||
|
||||
// Remove duplicate messages from instructions
|
||||
const instructionMessageHashMap = new Map<string, number>();
|
||||
for (const loggedCall of formattedLoggedCalls) {
|
||||
for (const message of loggedCall.instruction) {
|
||||
const hash = hashObject(message);
|
||||
if (instructionMessageHashMap.has(hash)) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
instructionMessageHashMap.set(hash, instructionMessageHashMap.get(hash)! + 1);
|
||||
} else {
|
||||
instructionMessageHashMap.set(hash, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const loggedCall of formattedLoggedCalls) {
|
||||
loggedCall.instruction = loggedCall.instruction.filter((message) => {
|
||||
const hash = hashObject(message);
|
||||
// If the same message appears in a single instruction multiple times, there is some danger of
|
||||
// it being removed from all logged calls. This is enough of an edge case that we don't
|
||||
// need to worry about it for now.
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
return instructionMessageHashMap.get(hash)! < formattedLoggedCalls.length;
|
||||
});
|
||||
}
|
||||
|
||||
// Stringify instructions and outputs
|
||||
const stringifiedLoggedCalls = shuffle(formattedLoggedCalls).map((loggedCall) => ({
|
||||
instruction: JSON.stringify(loggedCall.instruction),
|
||||
output: JSON.stringify(loggedCall.output),
|
||||
}));
|
||||
|
||||
const splitIndex = Math.floor((stringifiedLoggedCalls.length * input.testingSplit) / 100);
|
||||
|
||||
const testingData = stringifiedLoggedCalls.slice(0, splitIndex);
|
||||
const trainingData = stringifiedLoggedCalls.slice(splitIndex);
|
||||
|
||||
// Convert arrays to JSONL format
|
||||
const trainingDataJSONL = trainingData.map((item) => JSON.stringify(item)).join("\n");
|
||||
const testingDataJSONL = testingData.map((item) => JSON.stringify(item)).join("\n");
|
||||
|
||||
const output = new WritableStreamBuffer();
|
||||
const archive = archiver("zip");
|
||||
|
||||
archive.pipe(output);
|
||||
archive.append(trainingDataJSONL, { name: "train.jsonl" });
|
||||
archive.append(testingDataJSONL, { name: "test.jsonl" });
|
||||
await archive.finalize();
|
||||
|
||||
// Convert buffer to base64
|
||||
const base64 = output.getContents().toString("base64");
|
||||
|
||||
return base64;
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -61,7 +61,7 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
|
||||
evalsComplete,
|
||||
};
|
||||
}),
|
||||
forceRefetch: protectedProcedure
|
||||
hardRefetch: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
scenarioId: z.string(),
|
||||
@@ -85,7 +85,10 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
|
||||
});
|
||||
|
||||
if (!cell) {
|
||||
await generateNewCell(input.variantId, input.scenarioId, { stream: true });
|
||||
await generateNewCell(input.variantId, input.scenarioId, {
|
||||
stream: true,
|
||||
hardRefetch: true,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -96,7 +99,7 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
|
||||
},
|
||||
});
|
||||
|
||||
await queueQueryModel(cell.id, true);
|
||||
await queueQueryModel(cell.id, { stream: true, hardRefetch: true });
|
||||
}),
|
||||
getTemplatedPromptMessage: publicProcedure
|
||||
.input(
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
import "dotenv/config";
|
||||
import { openai } from "../utils/openai";
|
||||
|
||||
const resp = await openai.chat.completions.create({
|
||||
model: "gpt-3.5-turbo-0613",
|
||||
stream: true,
|
||||
messages: [
|
||||
{
|
||||
role: "user",
|
||||
content: "count to 20",
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
for await (const part of resp) {
|
||||
console.log("part", part);
|
||||
}
|
||||
|
||||
console.log("final resp", resp);
|
||||
@@ -1,4 +1,4 @@
|
||||
import { type Helpers, type Task, makeWorkerUtils } from "graphile-worker";
|
||||
import { type Helpers, type Task, makeWorkerUtils, TaskSpec } from "graphile-worker";
|
||||
import { env } from "~/env.mjs";
|
||||
|
||||
let workerUtilsPromise: ReturnType<typeof makeWorkerUtils> | null = null;
|
||||
@@ -16,9 +16,11 @@ function defineTask<TPayload>(
|
||||
taskIdentifier: string,
|
||||
taskHandler: (payload: TPayload, helpers: Helpers) => Promise<void>,
|
||||
) {
|
||||
const enqueue = async (payload: TPayload, runAt?: Date) => {
|
||||
const enqueue = async (payload: TPayload, spec?: TaskSpec) => {
|
||||
console.log("Enqueuing task", taskIdentifier, payload);
|
||||
await (await workerUtils()).addJob(taskIdentifier, payload, { runAt });
|
||||
|
||||
const utils = await workerUtils();
|
||||
return await utils.addJob(taskIdentifier, payload, spec);
|
||||
};
|
||||
|
||||
const handler = (payload: TPayload, helpers: Helpers) => {
|
||||
|
||||
@@ -25,7 +25,6 @@ function calculateDelay(numPreviousTries: number): number {
|
||||
}
|
||||
|
||||
export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) => {
|
||||
console.log("RUNNING TASK", task);
|
||||
const { cellId, stream, numPreviousTries } = task;
|
||||
const cell = await prisma.scenarioVariantCell.findUnique({
|
||||
where: { id: cellId },
|
||||
@@ -153,7 +152,7 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
|
||||
stream,
|
||||
numPreviousTries: numPreviousTries + 1,
|
||||
},
|
||||
retryTime,
|
||||
{ runAt: retryTime, jobKey: cellId, priority: 3 },
|
||||
);
|
||||
await prisma.scenarioVariantCell.update({
|
||||
where: { id: cellId },
|
||||
@@ -172,7 +171,13 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
|
||||
}
|
||||
});
|
||||
|
||||
export const queueQueryModel = async (cellId: string, stream: boolean) => {
|
||||
export const queueQueryModel = async (
|
||||
cellId: string,
|
||||
options: { stream?: boolean; hardRefetch?: boolean } = {},
|
||||
) => {
|
||||
// Hard refetches are higher priority than streamed queries, which are higher priority than non-streamed queries.
|
||||
const jobPriority = options.hardRefetch ? 0 : options.stream ? 1 : 2;
|
||||
|
||||
await Promise.all([
|
||||
prisma.scenarioVariantCell.update({
|
||||
where: {
|
||||
@@ -184,6 +189,13 @@ export const queueQueryModel = async (cellId: string, stream: boolean) => {
|
||||
jobQueuedAt: new Date(),
|
||||
},
|
||||
}),
|
||||
queryModel.enqueue({ cellId, stream, numPreviousTries: 0 }),
|
||||
|
||||
queryModel.enqueue(
|
||||
{ cellId, stream: options.stream ?? false, numPreviousTries: 0 },
|
||||
|
||||
// Streamed queries are higher priority than non-streamed queries. Lower
|
||||
// numbers are higher priority in graphile-worker.
|
||||
{ jobKey: cellId, priority: jobPriority },
|
||||
),
|
||||
]);
|
||||
};
|
||||
|
||||
@@ -13,5 +13,6 @@ export const runNewEval = defineTask<RunNewEvalJob>("runNewEval", async (task) =
 });

 export const queueRunNewEval = async (experimentId: string) => {
-  await runNewEval.enqueue({ experimentId });
+  // Evals are lower priority than completions
+  await runNewEval.enqueue({ experimentId }, { priority: 4 });
 };
app/src/server/tasks/test-tasks.ts (new file, 47 lines)
@@ -0,0 +1,47 @@
|
||||
import "dotenv/config";
|
||||
|
||||
import defineTask from "./defineTask";
|
||||
import { type TaskList, run } from "graphile-worker";
|
||||
import { env } from "~/env.mjs";
|
||||
|
||||
import "../../../sentry.server.config";
|
||||
|
||||
export type TestTask = { i: number };
|
||||
|
||||
// When a new eval is created, we want to run it on all existing outputs, but return the new eval first
|
||||
export const testTask = defineTask<TestTask>("testTask", (task) => {
|
||||
console.log("ran task ", task.i);
|
||||
|
||||
void new Promise((_resolve, reject) => setTimeout(reject, 500));
|
||||
return Promise.resolve();
|
||||
});
|
||||
|
||||
const registeredTasks = [testTask];
|
||||
|
||||
const taskList = registeredTasks.reduce((acc, task) => {
|
||||
acc[task.task.identifier] = task.task.handler;
|
||||
return acc;
|
||||
}, {} as TaskList);
|
||||
|
||||
// process.on("unhandledRejection", (reason, promise) => {
|
||||
// console.log("Unhandled Rejection at:", reason?.stack || reason);
|
||||
// });
|
||||
|
||||
// Run a worker to execute jobs:
|
||||
const runner = await run({
|
||||
connectionString: env.DATABASE_URL,
|
||||
concurrency: 10,
|
||||
// Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
|
||||
noHandleSignals: false,
|
||||
pollInterval: 1000,
|
||||
taskList,
|
||||
});
|
||||
|
||||
console.log("Worker successfully started");
|
||||
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await testTask.enqueue({ i });
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||
}
|
||||
|
||||
await runner.promise;
|
||||
@@ -1,5 +1,6 @@
 import { type TaskList, run } from "graphile-worker";
+import "dotenv/config";
 import "../../../sentry.server.config";

 import { env } from "~/env.mjs";
 import { queryModel } from "./queryModel.task";
@@ -17,7 +18,8 @@ const taskList = registeredTasks.reduce((acc, task) => {
 // Run a worker to execute jobs:
 const runner = await run({
   connectionString: env.DATABASE_URL,
-  concurrency: 10,
+  concurrency: env.WORKER_CONCURRENCY,
+  maxPoolSize: env.WORKER_MAX_POOL_SIZE,
   // Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
   noHandleSignals: false,
   pollInterval: 1000,
|
||||
|
||||
@@ -41,7 +41,7 @@ const requestUpdatedPromptFunction = async (
|
||||
) => {
|
||||
const originalModelProvider = modelProviders[originalVariant.modelProvider as SupportedProvider];
|
||||
const originalModel = originalModelProvider.models[originalVariant.model] as Model;
|
||||
let newContructionFn = "";
|
||||
let newConstructionFn = "";
|
||||
for (let i = 0; i < NUM_RETRIES; i++) {
|
||||
try {
|
||||
const messages: CreateChatCompletionRequestMessage[] = [
|
||||
@@ -109,6 +109,12 @@ const requestUpdatedPromptFunction = async (
|
||||
function_call: {
|
||||
name: "update_prompt_constructor_function",
|
||||
},
|
||||
openpipe: {
|
||||
tags: {
|
||||
prompt_id: "deriveNewConstructFn",
|
||||
model_translation: (!!newModel).toString(),
|
||||
},
|
||||
},
|
||||
});
|
||||
const argString = completion.choices[0]?.message?.function_call?.arguments || "{}";
|
||||
|
||||
@@ -131,7 +137,7 @@ const requestUpdatedPromptFunction = async (
|
||||
const args = await contructPromptFunctionArgs.copy(); // Get the actual value from the isolate
|
||||
|
||||
if (args && isObject(args) && "new_prompt_function" in args) {
|
||||
newContructionFn = await formatPromptConstructor(args.new_prompt_function as string);
|
||||
newConstructionFn = await formatPromptConstructor(args.new_prompt_function as string);
|
||||
break;
|
||||
}
|
||||
} catch (e) {
|
||||
@@ -139,5 +145,5 @@ const requestUpdatedPromptFunction = async (
|
||||
}
|
||||
}
|
||||
|
||||
return newContructionFn;
|
||||
return newConstructionFn;
|
||||
};
|
||||
|
||||
@@ -9,10 +9,8 @@ import parsePromptConstructor from "~/promptConstructor/parse";
|
||||
export const generateNewCell = async (
|
||||
variantId: string,
|
||||
scenarioId: string,
|
||||
options?: { stream?: boolean },
|
||||
options: { stream?: boolean; hardRefetch?: boolean } = {},
|
||||
): Promise<void> => {
|
||||
const stream = options?.stream ?? false;
|
||||
|
||||
const variant = await prisma.promptVariant.findUnique({
|
||||
where: {
|
||||
id: variantId,
|
||||
@@ -121,6 +119,6 @@ export const generateNewCell = async (
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
await queueQueryModel(cell.id, stream);
|
||||
await queueQueryModel(cell.id, options);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -17,13 +17,7 @@ try {
|
||||
// Set a dummy key so it doesn't fail at build time
|
||||
config = {
|
||||
apiKey: env.OPENAI_API_KEY ?? "dummy-key",
|
||||
openpipe: {
|
||||
apiKey: env.OPENPIPE_API_KEY,
|
||||
baseUrl: "http://localhost:3000/api/v1",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// export const openai = env.OPENPIPE_API_KEY ? new OpenAI.OpenAI(config) : new OriginalOpenAI(config);
|
||||
|
||||
export const openai = new OpenAI(config);
|
||||
|
||||
@@ -53,6 +53,11 @@ export const runGpt4Eval = async (
|
||||
},
|
||||
},
|
||||
],
|
||||
openpipe: {
|
||||
tags: {
|
||||
prompt_id: "runOneEval",
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
|
||||
app/src/state/columnVisiblitySlice.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
import { type SliceCreator } from "./store";
|
||||
|
||||
export const comparators = ["=", "!=", "CONTAINS", "NOT_CONTAINS"] as const;
|
||||
|
||||
export const defaultFilterableFields = ["Request", "Response", "Model", "Status Code"] as const;
|
||||
|
||||
export enum StaticColumnKeys {
|
||||
SENT_AT = "sentAt",
|
||||
MODEL = "model",
|
||||
DURATION = "duration",
|
||||
INPUT_TOKENS = "inputTokens",
|
||||
OUTPUT_TOKENS = "outputTokens",
|
||||
STATUS_CODE = "statusCode",
|
||||
}
|
||||
|
||||
export type ColumnVisibilitySlice = {
|
||||
visibleColumns: Set<string>;
|
||||
toggleColumnVisibility: (columnKey: string) => void;
|
||||
showAllColumns: (columnKeys: string[]) => void;
|
||||
};
|
||||
|
||||
export const createColumnVisibilitySlice: SliceCreator<ColumnVisibilitySlice> = (set, get) => ({
|
||||
// initialize with all static columns visible
|
||||
visibleColumns: new Set(Object.values(StaticColumnKeys)),
|
||||
toggleColumnVisibility: (columnKey: string) =>
|
||||
set((state) => {
|
||||
if (state.columnVisibility.visibleColumns.has(columnKey)) {
|
||||
state.columnVisibility.visibleColumns.delete(columnKey);
|
||||
} else {
|
||||
state.columnVisibility.visibleColumns.add(columnKey);
|
||||
}
|
||||
}),
|
||||
showAllColumns: (columnKeys: string[]) =>
|
||||
set((state) => {
|
||||
state.columnVisibility.visibleColumns = new Set(columnKeys);
|
||||
}),
|
||||
});
|
||||
app/src/state/featureFlags.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
import { type SliceCreator } from "./store";
|
||||
|
||||
export type FeatureFlagsSlice = {
|
||||
flagsLoaded: boolean;
|
||||
featureFlags: {
|
||||
betaAccess: boolean;
|
||||
};
|
||||
setFeatureFlags: (flags: string[] | undefined) => void;
|
||||
};
|
||||
|
||||
export const createFeatureFlagsSlice: SliceCreator<FeatureFlagsSlice> = (set) => ({
|
||||
flagsLoaded: false,
|
||||
featureFlags: {
|
||||
betaAccess: false,
|
||||
},
|
||||
setFeatureFlags: (flags) =>
|
||||
set((state) => {
|
||||
state.featureFlags.featureFlags = {
|
||||
betaAccess: flags?.includes("betaAccess") ?? false,
|
||||
};
|
||||
state.featureFlags.flagsLoaded = true;
|
||||
}),
|
||||
});
|
||||
@@ -1,13 +1,27 @@
|
||||
import { type PersistOptions } from "zustand/middleware/persist";
|
||||
import { type State } from "./store";
|
||||
import SuperJSON from "superjson";
|
||||
import { merge, pick } from "lodash-es";
|
||||
import { type PartialDeep } from "type-fest";
|
||||
|
||||
export const stateToPersist = {
|
||||
selectedProjectId: null as string | null,
|
||||
};
|
||||
export type PersistedState = PartialDeep<State>;
|
||||
|
||||
export const persistOptions: PersistOptions<State, typeof stateToPersist> = {
|
||||
export const persistOptions: PersistOptions<State, PersistedState> = {
|
||||
name: "persisted-app-store",
|
||||
partialize: (state) => ({
|
||||
selectedProjectId: state.selectedProjectId,
|
||||
columnVisibility: pick(state.columnVisibility, ["visibleColumns"]),
|
||||
}),
|
||||
merge: (saved, state) => merge(state, saved),
|
||||
storage: {
|
||||
getItem: (key) => {
|
||||
const data = localStorage.getItem(key);
|
||||
return data ? SuperJSON.parse(data) : null;
|
||||
},
|
||||
setItem: (key, value) => localStorage.setItem(key, SuperJSON.stringify(value)),
|
||||
removeItem: (key) => localStorage.removeItem(key),
|
||||
},
|
||||
onRehydrateStorage: (state) => {
|
||||
if (state) state.isRehydrated = true;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -8,13 +8,16 @@ import {
|
||||
createVariantEditorSlice,
|
||||
} from "./sharedVariantEditor.slice";
|
||||
import { type APIClient } from "~/utils/api";
|
||||
import { persistOptions, type stateToPersist } from "./persist";
|
||||
import { type PersistedState, persistOptions } from "./persist";
|
||||
import { type SelectedLogsSlice, createSelectedLogsSlice } from "./selectedLogsSlice";
|
||||
import { type LogFiltersSlice, createLogFiltersSlice } from "./logFiltersSlice";
|
||||
import { type ColumnVisibilitySlice, createColumnVisibilitySlice } from "./columnVisiblitySlice";
|
||||
import { type FeatureFlagsSlice, createFeatureFlagsSlice } from "./featureFlags";
|
||||
|
||||
enableMapSet();
|
||||
|
||||
export type State = {
|
||||
isRehydrated: boolean;
|
||||
drawerOpen: boolean;
|
||||
openDrawer: () => void;
|
||||
closeDrawer: () => void;
|
||||
@@ -25,6 +28,8 @@ export type State = {
|
||||
setSelectedProjectId: (id: string) => void;
|
||||
selectedLogs: SelectedLogsSlice;
|
||||
logFilters: LogFiltersSlice;
|
||||
columnVisibility: ColumnVisibilitySlice;
|
||||
featureFlags: FeatureFlagsSlice;
|
||||
};
|
||||
|
||||
export type SliceCreator<T> = StateCreator<State, [["zustand/immer", never]], [], T>;
|
||||
@@ -32,18 +37,15 @@ export type SliceCreator<T> = StateCreator<State, [["zustand/immer", never]], []
|
||||
export type SetFn = Parameters<SliceCreator<unknown>>[0];
|
||||
export type GetFn = Parameters<SliceCreator<unknown>>[1];
|
||||
|
||||
const useBaseStore = create<
|
||||
State,
|
||||
[["zustand/persist", typeof stateToPersist], ["zustand/immer", never]]
|
||||
>(
|
||||
const useBaseStore = create<State, [["zustand/persist", PersistedState], ["zustand/immer", never]]>(
|
||||
persist(
|
||||
immer((set, get, ...rest) => ({
|
||||
isRehydrated: false,
|
||||
api: null,
|
||||
setApi: (api) =>
|
||||
set((state) => {
|
||||
state.api = api;
|
||||
}),
|
||||
|
||||
drawerOpen: false,
|
||||
openDrawer: () =>
|
||||
set((state) => {
|
||||
@@ -61,6 +63,8 @@ const useBaseStore = create<
|
||||
}),
|
||||
selectedLogs: createSelectedLogsSlice(set, get, ...rest),
|
||||
logFilters: createLogFiltersSlice(set, get, ...rest),
|
||||
columnVisibility: createColumnVisibilitySlice(set, get, ...rest),
|
||||
featureFlags: createFeatureFlagsSlice(set, get, ...rest),
|
||||
})),
|
||||
persistOptions,
|
||||
),
|
||||
|
||||
@@ -78,33 +78,6 @@ export const requireCanModifyProject = async (projectId: string, ctx: TRPCContex
|
||||
}
|
||||
};
|
||||
|
||||
export const requireCanViewDataset = async (datasetId: string, ctx: TRPCContext) => {
|
||||
ctx.markAccessControlRun();
|
||||
|
||||
const dataset = await prisma.dataset.findFirst({
|
||||
where: {
|
||||
id: datasetId,
|
||||
project: {
|
||||
projectUsers: {
|
||||
some: {
|
||||
role: { in: [ProjectUserRole.ADMIN, ProjectUserRole.MEMBER] },
|
||||
userId: ctx.session?.user.id,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!dataset) {
|
||||
throw new TRPCError({ code: "UNAUTHORIZED" });
|
||||
}
|
||||
};
|
||||
|
||||
export const requireCanModifyDataset = async (datasetId: string, ctx: TRPCContext) => {
|
||||
// Right now all users who can view a dataset can also modify it
|
||||
await requireCanViewDataset(datasetId, ctx);
|
||||
};
|
||||
|
||||
export const requireCanViewExperiment = (experimentId: string, ctx: TRPCContext): Promise<void> => {
|
||||
// Right now all experiments are publicly viewable, so this is a no-op.
|
||||
ctx.markAccessControlRun();
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
"use client";
|
||||
import { useSession } from "next-auth/react";
|
||||
import React, { type ReactNode, useEffect } from "react";
|
||||
import { PostHogProvider } from "posthog-js/react";
|
||||
import { PostHogProvider, useActiveFeatureFlags } from "posthog-js/react";
|
||||
|
||||
import posthog from "posthog-js";
|
||||
import { env } from "~/env.mjs";
|
||||
import { useRouter } from "next/router";
|
||||
import { useAppStore } from "~/state/store";
|
||||
|
||||
// Make sure we're in the browser
|
||||
const inBrowser = typeof window !== "undefined";
|
||||
@@ -24,6 +25,14 @@ export const PosthogAppProvider = ({ children }: { children: ReactNode }) => {
|
||||
};
|
||||
}, [router.events]);
|
||||
|
||||
const setFeatureFlags = useAppStore((s) => s.featureFlags.setFeatureFlags);
|
||||
const activeFlags = useActiveFeatureFlags();
|
||||
useEffect(() => {
|
||||
if (activeFlags) {
|
||||
setFeatureFlags(activeFlags);
|
||||
}
|
||||
}, [activeFlags, setFeatureFlags]);
|
||||
|
||||
useEffect(() => {
|
||||
if (env.NEXT_PUBLIC_POSTHOG_KEY && inBrowser && session && session.user) {
|
||||
posthog.init(env.NEXT_PUBLIC_POSTHOG_KEY, {
|
||||
|
||||
@@ -15,8 +15,8 @@ export const useExperiments = () => {
|
||||
export const useExperiment = () => {
|
||||
const router = useRouter();
|
||||
const experiment = api.experiments.get.useQuery(
|
||||
{ id: router.query.id as string },
|
||||
{ enabled: !!router.query.id },
|
||||
{ slug: router.query.experimentSlug as string },
|
||||
{ enabled: !!router.query.experimentSlug },
|
||||
);
|
||||
|
||||
return experiment;
|
||||
@@ -26,34 +26,6 @@ export const useExperimentAccess = () => {
|
||||
return useExperiment().data?.access ?? { canView: false, canModify: false };
|
||||
};
|
||||
|
||||
export const useDatasets = () => {
|
||||
const selectedProjectId = useAppStore((state) => state.selectedProjectId);
|
||||
return api.datasets.list.useQuery(
|
||||
{ projectId: selectedProjectId ?? "" },
|
||||
{ enabled: !!selectedProjectId },
|
||||
);
|
||||
};
|
||||
|
||||
export const useDataset = () => {
|
||||
const router = useRouter();
|
||||
const dataset = api.datasets.get.useQuery(
|
||||
{ id: router.query.id as string },
|
||||
{ enabled: !!router.query.id },
|
||||
);
|
||||
|
||||
return dataset;
|
||||
};
|
||||
|
||||
export const useDatasetEntries = () => {
|
||||
const dataset = useDataset();
|
||||
const { page, pageSize } = usePageParams();
|
||||
|
||||
return api.datasetEntries.list.useQuery(
|
||||
{ datasetId: dataset.data?.id ?? "", page, pageSize },
|
||||
{ enabled: dataset.data?.id != null },
|
||||
);
|
||||
};
|
||||
|
||||
type AsyncFunction<T extends unknown[], U> = (...args: T) => Promise<U>;
|
||||
|
||||
export function useHandledAsyncCallback<T extends unknown[], U>(
|
||||
@@ -205,3 +177,22 @@ export const useTagNames = () => {
|
||||
{ enabled: !!selectedProjectId },
|
||||
);
|
||||
};
|
||||
|
||||
export const useFineTunes = () => {
|
||||
const selectedProjectId = useAppStore((state) => state.selectedProjectId);
|
||||
const { page, pageSize } = usePageParams();
|
||||
|
||||
return api.fineTunes.list.useQuery(
|
||||
{ projectId: selectedProjectId ?? "", page, pageSize },
|
||||
{ enabled: !!selectedProjectId },
|
||||
);
|
||||
};
|
||||
|
||||
export const useIsClientRehydrated = () => {
|
||||
const isRehydrated = useAppStore((state) => state.isRehydrated);
|
||||
const [isMounted, setIsMounted] = useState(false);
|
||||
useEffect(() => {
|
||||
setIsMounted(true);
|
||||
}, []);
|
||||
return isRehydrated && isMounted;
|
||||
};
|
||||
|
||||
@@ -1,9 +0,0 @@
-#! /bin/bash
-
-set -e
-
-cd "$(dirname "$0")/.."
-
-source app/.env
-
-docker build . --file app/Dockerfile
@@ -141,9 +141,19 @@
         "type": "object",
         "properties": {
           "status": {
-            "type": "string",
-            "enum": [
-              "ok"
+            "anyOf": [
+              {
+                "type": "string",
+                "enum": [
+                  "ok"
+                ]
+              },
+              {
+                "type": "string",
+                "enum": [
+                  "error"
+                ]
+              }
             ]
           }
         },
client-libs/python/README.md (new file, 40 lines)
@@ -0,0 +1,40 @@
# OpenPipe Python Client

This client allows you to automatically report your OpenAI calls to [OpenPipe](https://openpipe.ai/).

## Installation

`pip install openpipe`

## Usage

1. Create a project at https://app.openpipe.ai
2. Find your project's API key at https://app.openpipe.ai/project/settings
3. Configure the OpenPipe client as shown below.

```python
from openpipe import openai, configure_openpipe
import os

# Set the OpenPipe API key you got in step (2) above.
# If you have the `OPENPIPE_API_KEY` environment variable set we'll read from it by default.
configure_openpipe(api_key=os.getenv("OPENPIPE_API_KEY"))

# Configure OpenAI the same way you would normally
openai.api_key = os.getenv("OPENAI_API_KEY")
```

You can then use the OpenPipe client in place of the standard `openai` module for your normal OpenAI calls.
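For example, a plain chat completion call looks just like it would with the stock `openai` module, and the wrapper reports the request and response to your project in the background. A minimal sketch, mirroring the calls used in this package's test suite (the model and prompt below are placeholders):

```python
from openpipe import openai

# A normal chat completion call; after `configure_openpipe` has run, the
# wrapper logs the request and response to your OpenPipe project.
completion = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "system", "content": "count to 3"}],
)

# OpenPipe metadata is attached to the response; "SKIP" means caching was
# not requested for this call.
print(completion.openpipe["cache_status"])
```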

## Special Features

### Tagging

OpenPipe has a concept of "tagging." This is very useful for grouping a certain set of completions together. When you're using a dataset for fine-tuning, you can select all the prompts that match a certain set of tags. Here's how you can use the tagging feature:

```python
completion = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "system", "content": "count to 10"}],
    openpipe={"tags": {"prompt_id": "counting"}},
)
```
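The Python changes elsewhere in this changeset also add a `cache` option, with `cache_status` values of `HIT`, `MISS`, or `SKIP`, and note that caching is not yet supported for streaming requests. A minimal sketch, assuming `tags` and `cache` can be combined in the same `openpipe` argument (both are read from that dict in `openpipe/shared.py`):

```python
# Request caching for a non-streaming completion; the cache flag is ignored
# (with a printed warning) if `stream=True` is also passed.
completion = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "system", "content": "count to 10"}],
    openpipe={
        "tags": {"prompt_id": "counting"},
        "cache": True,
    },
)

# The first call reports "MISS"; an identical repeat call is served from the
# cache and reports "HIT".
print(completion.openpipe["cache_status"])
```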
client-libs/python/openpipe/LICENSE (new file, 202 lines: the standard Apache License, Version 2.0 text, omitted here)
||||
@@ -13,7 +13,8 @@ from .local_testing_only_get_latest_logged_call_response_200_tags import (
|
||||
from .report_json_body import ReportJsonBody
|
||||
from .report_json_body_tags import ReportJsonBodyTags
|
||||
from .report_response_200 import ReportResponse200
|
||||
from .report_response_200_status import ReportResponse200Status
|
||||
from .report_response_200_status_type_0 import ReportResponse200StatusType0
|
||||
from .report_response_200_status_type_1 import ReportResponse200StatusType1
|
||||
|
||||
__all__ = (
|
||||
"CheckCacheJsonBody",
|
||||
@@ -25,5 +26,6 @@ __all__ = (
|
||||
"ReportJsonBody",
|
||||
"ReportJsonBodyTags",
|
||||
"ReportResponse200",
|
||||
"ReportResponse200Status",
|
||||
"ReportResponse200StatusType0",
|
||||
"ReportResponse200StatusType1",
|
||||
)
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from typing import Any, Dict, Type, TypeVar
|
||||
from typing import Any, Dict, Type, TypeVar, Union
|
||||
|
||||
from attrs import define
|
||||
|
||||
from ..models.report_response_200_status import ReportResponse200Status
|
||||
from ..models.report_response_200_status_type_0 import ReportResponse200StatusType0
|
||||
from ..models.report_response_200_status_type_1 import ReportResponse200StatusType1
|
||||
|
||||
T = TypeVar("T", bound="ReportResponse200")
|
||||
|
||||
@@ -11,13 +12,19 @@ T = TypeVar("T", bound="ReportResponse200")
|
||||
class ReportResponse200:
|
||||
"""
|
||||
Attributes:
|
||||
status (ReportResponse200Status):
|
||||
status (Union[ReportResponse200StatusType0, ReportResponse200StatusType1]):
|
||||
"""
|
||||
|
||||
status: ReportResponse200Status
|
||||
status: Union[ReportResponse200StatusType0, ReportResponse200StatusType1]
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
status = self.status.value
|
||||
status: str
|
||||
|
||||
if isinstance(self.status, ReportResponse200StatusType0):
|
||||
status = self.status.value
|
||||
|
||||
else:
|
||||
status = self.status.value
|
||||
|
||||
field_dict: Dict[str, Any] = {}
|
||||
field_dict.update(
|
||||
@@ -31,7 +38,23 @@ class ReportResponse200:
|
||||
@classmethod
|
||||
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
|
||||
d = src_dict.copy()
|
||||
status = ReportResponse200Status(d.pop("status"))
|
||||
|
||||
def _parse_status(data: object) -> Union[ReportResponse200StatusType0, ReportResponse200StatusType1]:
|
||||
try:
|
||||
if not isinstance(data, str):
|
||||
raise TypeError()
|
||||
status_type_0 = ReportResponse200StatusType0(data)
|
||||
|
||||
return status_type_0
|
||||
except: # noqa: E722
|
||||
pass
|
||||
if not isinstance(data, str):
|
||||
raise TypeError()
|
||||
status_type_1 = ReportResponse200StatusType1(data)
|
||||
|
||||
return status_type_1
|
||||
|
||||
status = _parse_status(d.pop("status"))
|
||||
|
||||
report_response_200 = cls(
|
||||
status=status,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class ReportResponse200Status(str, Enum):
|
||||
class ReportResponse200StatusType0(str, Enum):
|
||||
OK = "ok"
|
||||
|
||||
def __str__(self) -> str:
|
||||
@@ -0,0 +1,8 @@
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class ReportResponse200StatusType1(str, Enum):
|
||||
ERROR = "error"
|
||||
|
||||
def __str__(self) -> str:
|
||||
return str(self.value)
|
||||
@@ -4,7 +4,7 @@ import time
|
||||
import inspect
|
||||
|
||||
from openpipe.merge_openai_chunks import merge_openai_chunks
|
||||
from openpipe.openpipe_meta import OpenPipeMeta
|
||||
from openpipe.openpipe_meta import openpipe_meta
|
||||
|
||||
from .shared import (
|
||||
_should_check_cache,
|
||||
@@ -41,9 +41,11 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
|
||||
)
|
||||
|
||||
cache_status = (
|
||||
"MISS" if _should_check_cache(openpipe_options) else "SKIP"
|
||||
"MISS"
|
||||
if _should_check_cache(openpipe_options, kwargs)
|
||||
else "SKIP"
|
||||
)
|
||||
chunk.openpipe = OpenPipeMeta(cache_status=cache_status)
|
||||
chunk.openpipe = openpipe_meta(cache_status=cache_status)
|
||||
|
||||
yield chunk
|
||||
|
||||
@@ -72,9 +74,9 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
|
||||
)
|
||||
|
||||
cache_status = (
|
||||
"MISS" if _should_check_cache(openpipe_options) else "SKIP"
|
||||
"MISS" if _should_check_cache(openpipe_options, kwargs) else "SKIP"
|
||||
)
|
||||
chat_completion["openpipe"] = OpenPipeMeta(cache_status=cache_status)
|
||||
chat_completion["openpipe"] = openpipe_meta(cache_status=cache_status)
|
||||
return chat_completion
|
||||
except Exception as e:
|
||||
received_at = int(time.time() * 1000)
|
||||
@@ -126,9 +128,11 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
|
||||
assembled_completion, chunk
|
||||
)
|
||||
cache_status = (
|
||||
"MISS" if _should_check_cache(openpipe_options) else "SKIP"
|
||||
"MISS"
|
||||
if _should_check_cache(openpipe_options, kwargs)
|
||||
else "SKIP"
|
||||
)
|
||||
chunk.openpipe = OpenPipeMeta(cache_status=cache_status)
|
||||
chunk.openpipe = openpipe_meta(cache_status=cache_status)
|
||||
|
||||
yield chunk
|
||||
|
||||
@@ -157,9 +161,9 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
|
||||
)
|
||||
|
||||
cache_status = (
|
||||
"MISS" if _should_check_cache(openpipe_options) else "SKIP"
|
||||
"MISS" if _should_check_cache(openpipe_options, kwargs) else "SKIP"
|
||||
)
|
||||
chat_completion["openpipe"] = OpenPipeMeta(cache_status=cache_status)
|
||||
chat_completion["openpipe"] = openpipe_meta(cache_status=cache_status)
|
||||
|
||||
return chat_completion
|
||||
except Exception as e:
|
||||
|
||||
@@ -1,7 +1,2 @@
-from attr import dataclass
-
-
-@dataclass
-class OpenPipeMeta:
-    # Cache status. One of 'HIT', 'MISS', 'SKIP'
-    cache_status: str
+def openpipe_meta(cache_status: str):
+    return {"cache_status": cache_status}
@@ -8,6 +8,7 @@ from openpipe.api_client.models.report_json_body_tags import (
 )
 import toml
 import time
+import os

 version = toml.load("pyproject.toml")["tool"]["poetry"]["version"]

@@ -15,6 +16,9 @@ configured_client = AuthenticatedClient(
     base_url="https://app.openpipe.ai/api/v1", token=""
 )

+if os.environ.get("OPENPIPE_API_KEY"):
+    configured_client.token = os.environ["OPENPIPE_API_KEY"]
+

 def _get_tags(openpipe_options):
     tags = openpipe_options.get("tags") or {}
@@ -24,10 +28,18 @@
     return ReportJsonBodyTags.from_dict(tags)


-def _should_check_cache(openpipe_options):
+def _should_check_cache(openpipe_options, req_payload):
     if configured_client.token == "":
         return False
-    return openpipe_options.get("cache", False)
+
+    cache_requested = openpipe_options.get("cache", False)
+    streaming = req_payload.get("stream", False)
+    if cache_requested and streaming:
+        print(
+            "Caching is not yet supported for streaming requests. Ignoring cache flag. Vote for this feature at https://github.com/OpenPipe/OpenPipe/issues/159"
+        )
+        return False
+    return cache_requested


 def _process_cache_payload(
@@ -44,7 +56,7 @@ def maybe_check_cache(
     openpipe_options={},
     req_payload={},
 ):
-    if not _should_check_cache(openpipe_options):
+    if not _should_check_cache(openpipe_options, req_payload):
         return None
     try:
         payload = check_cache.sync(
@@ -68,7 +80,7 @@ async def maybe_check_cache_async(
     openpipe_options={},
     req_payload={},
 ):
-    if not _should_check_cache(openpipe_options):
+    if not _should_check_cache(openpipe_options, req_payload):
         return None

     try:
|
||||
|
||||
@@ -27,12 +27,14 @@ def last_logged_call():
|
||||
return local_testing_only_get_latest_logged_call.sync(client=configured_client)
|
||||
|
||||
|
||||
@pytest.mark.focus
|
||||
def test_sync():
|
||||
completion = openai.ChatCompletion.create(
|
||||
model="gpt-3.5-turbo",
|
||||
messages=[{"role": "system", "content": "count to 3"}],
|
||||
)
|
||||
|
||||
print("completion is", completion)
|
||||
last_logged = last_logged_call()
|
||||
assert (
|
||||
last_logged.model_response.resp_payload["choices"][0]["message"]["content"]
|
||||
@@ -42,7 +44,7 @@ def test_sync():
|
||||
last_logged.model_response.req_payload["messages"][0]["content"] == "count to 3"
|
||||
)
|
||||
|
||||
assert completion.openpipe.cache_status == "SKIP"
|
||||
assert completion.openpipe["cache_status"] == "SKIP"
|
||||
|
||||
|
||||
def test_streaming():
|
||||
@@ -75,7 +77,7 @@ async def test_async():
|
||||
== "count down from 5"
|
||||
)
|
||||
|
||||
assert completion.openpipe.cache_status == "SKIP"
|
||||
assert completion.openpipe["cache_status"] == "SKIP"
|
||||
|
||||
|
||||
async def test_async_streaming():
|
||||
@@ -87,7 +89,7 @@ async def test_async_streaming():
|
||||
|
||||
merged = None
|
||||
async for chunk in completion:
|
||||
assert chunk.openpipe.cache_status == "SKIP"
|
||||
assert chunk.openpipe["cache_status"] == "SKIP"
|
||||
merged = merge_openai_chunks(merged, chunk)
|
||||
|
||||
last_logged = last_logged_call()
|
||||
@@ -100,7 +102,7 @@ async def test_async_streaming():
|
||||
last_logged.model_response.req_payload["messages"][0]["content"]
|
||||
== "count down from 5"
|
||||
)
|
||||
assert merged["openpipe"].cache_status == "SKIP"
|
||||
assert merged["openpipe"]["cache_status"] == "SKIP"
|
||||
|
||||
|
||||
def test_sync_with_tags():
|
||||
@@ -146,7 +148,7 @@ async def test_caching():
|
||||
messages=messages,
|
||||
openpipe={"cache": True},
|
||||
)
|
||||
assert completion.openpipe.cache_status == "MISS"
|
||||
assert completion.openpipe["cache_status"] == "MISS"
|
||||
|
||||
first_logged = last_logged_call()
|
||||
assert (
|
||||
@@ -159,4 +161,4 @@ async def test_caching():
|
||||
messages=messages,
|
||||
openpipe={"cache": True},
|
||||
)
|
||||
assert completion2.openpipe.cache_status == "HIT"
|
||||
assert completion2.openpipe["cache_status"] == "HIT"
|
||||
|
||||
@@ -1,9 +1,12 @@
 [tool.poetry]
 name = "openpipe"
-version = "0.1.0"
-description = ""
-authors = ["Kyle Corbitt <kyle@corbt.com>"]
+version = "3.0.1"
+description = "Python client library for the OpenPipe service"
+authors = ["Kyle Corbitt <kyle@openpipe.ai>"]
+license = "Apache-2.0"
+readme = "README.md"
+homepage = "https://github.com/OpenPipe/OpenPipe"
+repository = "https://github.com/OpenPipe/OpenPipe"

 [tool.poetry.dependencies]
 python = "^3.9"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.