Compare commits

...

114 Commits

Author SHA1 Message Date
David Corbitt
03a8d094fc Add header to scenario modal 2023-08-18 00:07:48 -07:00
David Corbitt
2b990622f5 Hide expand button for empty scenario editor 2023-08-18 00:06:05 -07:00
David Corbitt
d079eba557 Allow user to delete scenario without variables 2023-08-18 00:02:26 -07:00
Kyle Corbitt
d0102e3202 Merge pull request #171 from OpenPipe/experiment-slug
Use shorter experiment IDs
2023-08-17 23:33:30 -07:00
Kyle Corbitt
bd571c4c4e Merge pull request #170 from OpenPipe/jobs-log
Enqueue tasks more efficiently
2023-08-17 23:33:20 -07:00
Kyle Corbitt
296eb23d97 Use shorter experiment IDs
Because https://app.openpipe.ai/experiments/B1EtN6oHeXMele2 is a cooler URL than https://app.openpipe.ai/experiments/3692942c-6f1b-4bef-83b1-c11f00a3fbdd
2023-08-17 23:28:56 -07:00
Kyle Corbitt
4e2ae7a441 Enqueue tasks more efficiently
Previously we were opening a new database connection for each task we added. Not a problem at small scale but kinda overwhelming for Postgres now that we have more usage.
2023-08-17 22:42:46 -07:00
Kyle Corbitt
072dcee376 Merge pull request #168 from OpenPipe/jobs-log
Admin dashboard for jobs
2023-08-17 22:26:10 -07:00
Kyle Corbitt
94464c0617 Admin dashboard for jobs
Extremely simple jobs dashboard to sanity-check what we've got going on in the job queue.
2023-08-17 22:20:39 -07:00
arcticfly
980644f13c Support vicuna system message (#167)
* Support vicuna system message

* Change tags to USER and ASSISTANT
2023-08-17 21:02:27 -07:00
arcticfly
6a56250001 Add platypus 13b, vicuna 13b, and nous hermes 7b (#166)
* Add platypus

* Add vicuna 13b and nous hermes 7b
2023-08-17 20:01:10 -07:00
Kyle Corbitt
b1c7bbbd4a Merge pull request #165 from OpenPipe/better-output
Don't define CellWrapper inline
2023-08-17 19:07:32 -07:00
Kyle Corbitt
3e20fa31ca Don't define CellWrapper inline
This way we don't re-render the entire cell every time a variable changes. Better performance and handles modals correctly.

OutputCell is still a pretty messy component, which we'll have to address at some point, but the complexity is still manageable for now.
2023-08-17 17:52:45 -07:00
Kyle Corbitt
48a8e64be1 Merge pull request #164 from OpenPipe/more-models
Add Nous-Hermes and Airoboros models
2023-08-17 17:51:28 -07:00
David Corbitt
f3a5f11195 Temporarily remove platypus and stableBeluga models 2023-08-17 16:58:52 -07:00
David Corbitt
da5cbaf4dc Remove console.log 2023-08-17 16:16:22 -07:00
David Corbitt
acf74909c9 Ensure ending newline is displayed 2023-08-17 03:37:32 -07:00
David Corbitt
edac8da4a8 Convert system to user prompt for airoboros 2023-08-17 03:10:55 -07:00
David Corbitt
687f3dd85f Rename prompt modal 2023-08-17 02:34:26 -07:00
David Corbitt
0cef3ab5bd Only enable getTemplatedPromptMessage when modal open 2023-08-17 02:32:02 -07:00
David Corbitt
756b3185de Rename CellOptions 2023-08-17 01:44:18 -07:00
David Corbitt
3776ffc4c3 Change ScenarioRow background color 2023-08-17 01:44:06 -07:00
David Corbitt
82549122e1 Add 4 more models 2023-08-17 01:40:05 -07:00
David Corbitt
56a96a7db6 Use different color for row highlight style 2023-08-16 22:46:22 -07:00
David Corbitt
1596b15727 Fix warning from useLayoutEffect 2023-08-16 22:44:18 -07:00
David Corbitt
70d4a5bd9a Fix project settings padding on desktop 2023-08-16 22:40:27 -07:00
arcticfly
c6ec901374 Add openpipe/Chat provider with Open-Orca/OpenOrcaxOpenChat-Preview2-13B model (#163)
* Display 4 decimal points in ModelStatsCard

* Add openpipe-chat provider
2023-08-16 22:37:37 -07:00
David Corbitt
ad7665664a Update 7b-chat version 2023-08-16 19:23:01 -07:00
David Corbitt
108e3d1e85 Revert email to table-cell display on md screens 2023-08-16 18:49:14 -07:00
David Corbitt
76f600722a Sort project members by role 2023-08-16 18:30:27 -07:00
David Corbitt
d9a0e4581f Add bgColor behind selected project in menu 2023-08-16 18:16:44 -07:00
arcticfly
b9251ad93c Fix members table mobile styles (#162) 2023-08-16 17:52:25 -07:00
arcticfly
809ef04dc1 Invite members (#161)
* Allow user invitations

* Restyle inviting members

* Remove annoying comment

* Add page for accepting an invitation

* Send invitation email with Brevo

* Prevent admins from removing personal project users

* Mark access control for cancelProjectInvitation

* Make RadioGroup controlled

* Shorten form helper text

* Use nodemailer to send emails

* Update .env.example
2023-08-16 17:25:31 -07:00
arcticfly
0fba2c9ee7 Add NOT_CONTAINS, fix bugs (#160)
* Fix null case for tag comparisons

* Change debounce time to 500ms

* Add NOT_CONTAINS

* Avoid sql injection

* Store filters by id

* Fix chained NOT_CONTAINS
2023-08-15 16:43:59 -07:00
Kyle Corbitt
ac2ca0f617 Merge pull request #158 from OpenPipe/log-filters
Filter logged calls
2023-08-15 10:16:59 -07:00
David Corbitt
73b9e40ced Give LoggedCallsTable scrollbar 2023-08-15 03:12:59 -07:00
David Corbitt
3447e863cc Prevent model name from wrapping 2023-08-15 02:53:24 -07:00
David Corbitt
897e77b054 Prevent logged calls table flashes 2023-08-15 02:49:46 -07:00
David Corbitt
b22a4cd93b Combine migrations 2023-08-15 02:34:27 -07:00
David Corbitt
3547c85c86 Display tag values 2023-08-15 02:32:05 -07:00
David Corbitt
9636fa033e Add second tag to seed 2023-08-15 02:31:24 -07:00
David Corbitt
890a738568 Filter by tags 2023-08-15 01:50:48 -07:00
David Corbitt
7003595e76 Install lodash-es in client-libs for omit function 2023-08-15 00:59:23 -07:00
David Corbitt
00df4453d3 Remove old prettier files 2023-08-15 00:55:05 -07:00
David Corbitt
4c325fc1cc Move prettier files to top directory 2023-08-15 00:54:52 -07:00
David Corbitt
dfee8a0ed7 Merge branch 'main' into log-filters 2023-08-15 00:41:28 -07:00
David Corbitt
0b4e116783 Undo changes in client-libs 2023-08-15 00:30:35 -07:00
David Corbitt
2bcb1d16a3 Autoresize InputDropdown 2023-08-15 00:27:12 -07:00
David Corbitt
6e7efee21e Seed with tags 2023-08-15 00:26:11 -07:00
David Corbitt
bb9c3a9e61 Condense table 2023-08-15 00:26:05 -07:00
David Corbitt
11bfb5d5e4 Start server with timezone 2023-08-14 23:37:23 -07:00
Kyle Corbitt
b00ab933b3 Merge pull request #157 from OpenPipe/more-js-api
TypeScript SDK mostly working
2023-08-14 23:25:33 -07:00
Kyle Corbitt
8f4e7f7e2e TypeScript SDK mostly working
Ok so this is still pretty rough, and notably there's no reporting for streaming. But for non-streaming requests I've verified that this does in fact report requests locally.
2023-08-14 23:22:27 -07:00
David Corbitt
634739c045 Add InputDropdown 2023-08-14 23:02:08 -07:00
David Corbitt
9a9cbe8fd4 Hide paginators for empty lists 2023-08-14 21:17:03 -07:00
David Corbitt
649dc3376b Debounce filter value updates 2023-08-14 21:00:42 -07:00
David Corbitt
05e774d021 Style filters title 2023-08-14 20:47:18 -07:00
David Corbitt
0e328b13dc Style add filter button 2023-08-14 20:42:51 -07:00
David Corbitt
0a18ca9cd6 Allow filtering by response, model, and status code 2023-08-14 20:16:44 -07:00
David Corbitt
a5fe35912e Allow filter by request contains 2023-08-14 20:01:17 -07:00
David Corbitt
3d3ddbe7a9 Show number of rows in table header 2023-08-14 19:56:15 -07:00
David Corbitt
d8a5617dee Increase button radius 2023-08-14 19:51:06 -07:00
Kyle Corbitt
5da62fdc29 Merge pull request #156 from OpenPipe/move-api
Python package improvements
2023-08-14 19:45:14 -07:00
Kyle Corbitt
754e273049 Python package improvements
Added an endpoint for getting the actual stored responses, and used it to test and improve the python package.
2023-08-14 19:07:03 -07:00
Kyle Corbitt
2863dc2f89 Merge pull request #155 from OpenPipe/move-api
Move the external API into its own router
2023-08-14 17:02:34 -07:00
Kyle Corbitt
c4cef35717 Move the external API into its own router
Auth logic isn't shared between the clients anyway, so co-locating them is confusing since you can't use the same clients to call both. This also makes the codegen clients less verbose.
2023-08-14 16:56:50 -07:00
Kyle Corbitt
8552baf632 Merge pull request #154 from OpenPipe/broken-page
Cap the number of waiting messages we try to render
2023-08-14 15:47:48 -07:00
Kyle Corbitt
f41e2229ca Cap the number of waiting messages we try to render
If a cell was attempted several hours ago and never resolved, it crashes the UI because we try to render thousands of log messages once a second (eg. https://app.openpipe.ai/experiments/372d0827-186e-4a7d-a8a6-1bf7050eb5fd) We should probably have a different UI for cells that have hung for a long time to let you know you should just retry, but this quick fix should work for now.
2023-08-14 15:44:03 -07:00
arcticfly
e649f42c9c Await completions (#153)
* Continue polling stats while waiting for completions to finish

* Clarify convert to function call instructions
2023-08-14 13:03:48 -07:00
Kyle Corbitt
99f305483b Merge pull request #150 from OpenPipe/fix-build
(Probably) fixes the build
2023-08-14 07:59:20 -07:00
arcticfly
b28f4cad57 Remove scenarios header from output table card (#151) 2023-08-13 03:26:58 -07:00
Kyle Corbitt
df4a3a0950 (Probably) fixes the build
This probably fixes the build that I broke in https://github.com/OpenPipe/OpenPipe/pull/149. However, there's a small chance that it fixes it enough to deploy, but not enough to actually work. That would be bad, so not merging until I have time to monitor the deploy.
2023-08-12 23:50:31 -07:00
David Corbitt
e423ad656a Fix ExperimentCard aspect ratio 2023-08-12 23:31:25 -07:00
Kyle Corbitt
7d0d94de3a Merge pull request #149 from OpenPipe/js-client
Load the JS client using pnpm workspaces
2023-08-12 22:56:17 -07:00
Kyle Corbitt
344b257db4 Load the JS client using pnpm workspaces
This makes it so we're using our own openpipe client for all OpenAI calls from the OpenPipe app.

The client doesn't do anything at the moment beyond proxying to the OpenAI lib. But this infra work should make it easier to quickly iterate on the client and test the changes in our own app.
2023-08-12 15:24:48 -07:00
Kyle Corbitt
28b43b6e6d Merge pull request #148 from OpenPipe/js-client
Fix client bugs
2023-08-12 10:38:49 -07:00
Kyle Corbitt
8d373ec9b5 remove unused imports 2023-08-12 10:02:23 -07:00
Kyle Corbitt
537525667d don't reload monaco every render cycle
oops
2023-08-12 09:59:07 -07:00
Kyle Corbitt
519367c553 Fix client bugs
1. PostHog can only be used client-side
2. Can't nest <a> tags in the ProjectMenu
2023-08-12 09:35:52 -07:00
Kyle Corbitt
1a338ec863 Merge pull request #147 from OpenPipe/logs-ui
Style overhaul, make logged calls selectable
2023-08-12 08:48:49 -07:00
David Corbitt
01d0b8f778 Resurrect UserMenu 2023-08-12 04:28:41 -07:00
David Corbitt
d99836ec30 Add experiment button 2023-08-12 04:18:39 -07:00
David Corbitt
33751c12d2 Allow user to select logs 2023-08-12 04:07:58 -07:00
David Corbitt
89815e1f7f Add selectedLogs, rename setSelectedProjectId 2023-08-12 03:35:54 -07:00
David Corbitt
5fa5109f34 Make cache text gray 2023-08-12 03:06:19 -07:00
David Corbitt
b06ab2cbf9 Properly show model 2023-08-12 02:58:28 -07:00
David Corbitt
35fb554038 Center Add Variant button 2023-08-12 02:48:22 -07:00
David Corbitt
f238177277 Fix variant header top right border radius 2023-08-12 02:46:09 -07:00
David Corbitt
723c0f7505 Update colors throughout app 2023-08-12 02:32:09 -07:00
David Corbitt
ce6936f753 Change overall background color and menu 2023-08-12 02:31:52 -07:00
David Corbitt
2a80cbf74a Add relative time back in 2023-08-12 02:06:56 -07:00
David Corbitt
098805ef25 Create loggedCalls.router 2023-08-12 00:03:06 -07:00
David Corbitt
ed90bc5a99 Add dashboard page 2023-08-11 23:34:53 -07:00
arcticfly
de9be8c7ce Allow custom config file (#143)
* Allow custom config file

* Temporarily remove dependency on local openpipe
2023-08-11 23:07:04 -07:00
arcticfly
3e02bcf9b8 Update paginator styles (#142)
* Change paginator icons

* Remove horizontal spacing
2023-08-11 21:11:25 -07:00
Kyle Corbitt
cef2ee31fb Merge pull request #141 from OpenPipe/python-sdk
Add caching in Python
2023-08-11 19:04:18 -07:00
Kyle Corbitt
d7cff0f52e Add caching in Python
Still need it in JS
2023-08-11 19:02:35 -07:00
arcticfly
228c547839 Add logged calls pagination (#140)
* Store model on LoggedCall

* Allow multiple page sizes

* Add logged calls pagination
2023-08-11 19:00:09 -07:00
Kyle Corbitt
e1fcc8fb38 Merge pull request #139 from OpenPipe/python-sdk
Add a python client library
2023-08-11 17:48:07 -07:00
Kyle Corbitt
8ed47eb4dd Add a python client library
We still don't have any documentation and things are in flux, but you can report your OpenAI API calls to OpenPipe now.
2023-08-11 16:54:50 -07:00
arcticfly
3a908d51aa Store model on LoggedCall (#138) 2023-08-11 16:39:04 -07:00
arcticfly
d9db6d80ea Update external types (#137)
* Separate server and frontend error logic

* Update types in external api
2023-08-11 15:02:14 -07:00
arcticfly
8d1ee62ff1 Record model and cost when reporting logs (#136)
* Rename prompt and completion tokens to input and output tokens

* Add getUsage function

* Record model and cost when reporting log

* Remove unused imports

* Move UsageGraph to its own component

* Standardize model response fields

* Fix types
2023-08-11 13:56:47 -07:00
arcticfly
f270579283 Auto-resize project menu width (#135) 2023-08-10 22:50:39 -07:00
arcticfly
81fbaeae44 Style project settings on mobile (#134)
* Style project settings on mobile

* Use auto-resize text area for display name

* Remove unused import
2023-08-10 22:15:45 -07:00
arcticfly
5277afa199 Change logo (#133)
* Change logo

* Add more vertical padding on desktop

* Fix prettier
2023-08-10 21:44:33 -07:00
arcticfly
76c34d64e6 Change menu styles (#132)
* Change ProjectMenu placement

* Reduce UserMenu width
2023-08-10 18:48:23 -07:00
Kyle Corbitt
454ac9a0d3 Merge pull request #131 from OpenPipe/better-template-vars
Better scenario variable editing
2023-08-10 12:25:54 -07:00
Kyle Corbitt
5ed7adadf9 Better scenario variable editing
Some users have gotten confused by the scenario variable editing interface. This change makes the interface easier to understand.
2023-08-10 12:08:17 -07:00
Kyle Corbitt
b8e0f392ab Merge pull request #130 from OpenPipe/output-wrapping
Preserve linebreaks in model output
2023-08-10 07:26:55 -07:00
Kyle Corbitt
b2af83341d Preserve linebreaks in model output 2023-08-09 21:58:41 -07:00
Kyle Corbitt
e6d229d5f9 Merge pull request #129 from OpenPipe/persist-proj
persist the currently-selected project
2023-08-09 17:05:17 -07:00
Kyle Corbitt
1a6ae3aef7 Merge pull request #128 from OpenPipe/proj-styling
Sidebar styling
2023-08-09 17:05:02 -07:00
Kyle Corbitt
9051d80775 Sidebar styling
Unify the menu styles between the UserMenu and ProjectMenu
2023-08-09 16:47:09 -07:00
232 changed files with 10746 additions and 4727 deletions

5
.dockerignore Normal file
View File

@@ -0,0 +1,5 @@
**/node_modules/
.git
**/.venv/
**/.env*
**/.next/

5
.gitignore vendored Normal file
View File

@@ -0,0 +1,5 @@
.env
.venv/
*.pyc
node_modules/
*.tsbuildinfo

2
.prettierignore Normal file
View File

@@ -0,0 +1,2 @@
*.schema.json
app/pnpm-lock.yaml

View File

@@ -65,7 +65,14 @@ OpenPipe includes a tool to generate new test scenarios based on your existing p
4. Clone this repository: `git clone https://github.com/openpipe/openpipe`
5. Install the dependencies: `cd openpipe && pnpm install`
6. Create a `.env` file (`cp .env.example .env`) and enter your `OPENAI_API_KEY`.
7. Update `DATABASE_URL` if necessary to point to your Postgres instance and run `pnpm prisma db push` to create the database.
7. Update `DATABASE_URL` if necessary to point to your Postgres instance and run `pnpm prisma migrate dev` to create the database.
8. Create a [GitHub OAuth App](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/creating-an-oauth-app) and update the `GITHUB_CLIENT_ID` and `GITHUB_CLIENT_SECRET` values. (Note: a PR to make auth optional when running locally would be a great contribution!)
9. Start the app: `pnpm dev`.
10. Navigate to [http://localhost:3000](http://localhost:3000)
## Testing Locally
1. Copy your `.env` file to `.env.test`.
2. Update the `DATABASE_URL` to have a different database name than your development one
3. Run `DATABASE_URL=[your new database url] pnpm prisma migrate dev --skip-seed --skip-generate`
4. Run `pnpm test`

View File

@@ -32,5 +32,11 @@ NEXT_PUBLIC_HOST="http://localhost:3000"
GITHUB_CLIENT_ID="your_client_id"
GITHUB_CLIENT_SECRET="your_secret"
OPENPIPE_BASE_URL="http://localhost:3000/api"
OPENPIPE_BASE_URL="http://localhost:3000/api/v1"
OPENPIPE_API_KEY="your_key"
SENDER_EMAIL="placeholder"
SMTP_HOST="placeholder"
SMTP_PORT="placeholder"
SMTP_LOGIN="placeholder"
SMTP_PASSWORD="placeholder"

View File

@@ -6,7 +6,7 @@ const config = {
overrides: [
{
extends: ["plugin:@typescript-eslint/recommended-requiring-type-checking"],
files: ["*.ts", "*.tsx"],
files: ["*.mts", "*.ts", "*.tsx"],
parserOptions: {
project: path.join(__dirname, "tsconfig.json"),
},

4
app/.gitignore vendored
View File

@@ -34,6 +34,7 @@ yarn-error.log*
# do not commit any .env files to git, except for the .env.example file. https://create.t3.gg/en/usage/env-variables#using-environment-variables
.env
.env*.local
.env.test
# vercel
.vercel
@@ -43,3 +44,6 @@ yarn-error.log*
# Sentry Auth Token
.sentryclirc
# custom openai initialization
src/server/utils/openaiCustomConfig.json

View File

@@ -1,2 +0,0 @@
*.schema.json
pnpm-lock.yaml

View File

@@ -12,19 +12,21 @@ declare module "nextjs-routes" {
export type Route =
| StaticRoute<"/account/signin">
| DynamicRoute<"/api/[...trpc]", { "trpc": string[] }>
| StaticRoute<"/admin/jobs">
| DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
| StaticRoute<"/api/experiments/og-image">
| StaticRoute<"/api/openapi">
| StaticRoute<"/api/sentry-example-api">
| DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>
| DynamicRoute<"/api/v1/[...trpc]", { "trpc": string[] }>
| StaticRoute<"/api/v1/openapi">
| StaticRoute<"/dashboard">
| DynamicRoute<"/data/[id]", { "id": string }>
| StaticRoute<"/data">
| DynamicRoute<"/experiments/[id]", { "id": string }>
| DynamicRoute<"/experiments/[experimentSlug]", { "experimentSlug": string }>
| StaticRoute<"/experiments">
| StaticRoute<"/">
| StaticRoute<"/logged-calls">
| DynamicRoute<"/invitations/[invitationToken]", { "invitationToken": string }>
| StaticRoute<"/project/settings">
| StaticRoute<"/request-logs">
| StaticRoute<"/sentry-example-page">
| StaticRoute<"/world-champs">
| StaticRoute<"/world-champs/signup">;

View File

@@ -6,13 +6,13 @@ RUN yarn global add pnpm
# DEPS
FROM base as deps
WORKDIR /app
WORKDIR /code
COPY prisma ./
COPY app/prisma app/package.json ./app/
COPY client-libs/typescript/package.json ./client-libs/typescript/
COPY pnpm-lock.yaml pnpm-workspace.yaml ./
COPY package.json pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile
RUN cd app && pnpm install --frozen-lockfile
# BUILDER
FROM base as builder
@@ -25,22 +25,24 @@ ARG NEXT_PUBLIC_SENTRY_DSN
ARG SENTRY_AUTH_TOKEN
ARG NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
WORKDIR /code
COPY --from=deps /code/node_modules ./node_modules
COPY --from=deps /code/app/node_modules ./app/node_modules
COPY --from=deps /code/client-libs/typescript/node_modules ./client-libs/typescript/node_modules
COPY . .
RUN SKIP_ENV_VALIDATION=1 pnpm build
RUN cd app && SKIP_ENV_VALIDATION=1 pnpm build
# RUNNER
FROM base as runner
WORKDIR /app
WORKDIR /code/app
ENV NODE_ENV production
ENV NEXT_TELEMETRY_DISABLED 1
COPY --from=builder /app/ ./
COPY --from=builder /code/ /code/
EXPOSE 3000
ENV PORT 3000
# Run the "run-prod.sh" script
CMD /app/run-prod.sh
CMD /code/app/run-prod.sh

View File

@@ -36,6 +36,8 @@ let config = {
});
return config;
},
transpilePackages: ["openpipe"],
};
config = nextRoutes()(config);

View File

@@ -1,5 +1,6 @@
{
"name": "openpipe",
"name": "openpipe-app",
"private": true,
"type": "module",
"version": "0.1.0",
"license": "Apache-2.0",
@@ -9,22 +10,22 @@
},
"scripts": {
"build": "next build",
"dev:next": "next dev",
"dev:next": "TZ=UTC next dev",
"dev:wss": "pnpm tsx --watch src/wss-server.ts",
"dev:worker": "NODE_ENV='development' pnpm tsx --watch src/server/tasks/worker.ts",
"dev": "concurrently --kill-others 'pnpm dev:next' 'pnpm dev:wss' 'pnpm dev:worker'",
"postinstall": "prisma generate",
"lint": "next lint",
"start": "next start",
"codegen": "tsx src/server/scripts/client-codegen.ts",
"start": "TZ=UTC next start",
"codegen:clients": "tsx src/server/scripts/client-codegen.ts",
"codegen:db": "prisma generate && kysely-codegen --dialect postgres --out-file src/server/db.types.ts",
"seed": "tsx prisma/seed.ts",
"check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'",
"test": "pnpm vitest --no-threads"
"test": "pnpm vitest"
},
"dependencies": {
"@anthropic-ai/sdk": "^0.5.8",
"@apidevtools/json-schema-ref-parser": "^10.1.0",
"@babel/preset-typescript": "^7.22.5",
"@babel/standalone": "^7.22.9",
"@chakra-ui/anatomy": "^2.2.0",
"@chakra-ui/next-js": "^2.1.4",
@@ -37,6 +38,7 @@
"@monaco-editor/loader": "^1.3.3",
"@next-auth/prisma-adapter": "^1.0.5",
"@prisma/client": "^4.14.0",
"@sendinblue/client": "^3.3.1",
"@sentry/nextjs": "^7.61.0",
"@t3-oss/env-nextjs": "^0.3.1",
"@tabler/icons-react": "^2.22.0",
@@ -64,14 +66,18 @@
"json-stringify-pretty-compact": "^4.0.0",
"jsonschema": "^1.4.1",
"kysely": "^0.26.1",
"kysely-codegen": "^0.10.1",
"lodash-es": "^4.17.21",
"lucide-react": "^0.265.0",
"marked": "^7.0.3",
"next": "^13.4.2",
"next-auth": "^4.22.1",
"next-query-params": "^4.2.3",
"nextjs-cors": "^2.1.2",
"nextjs-routes": "^2.0.1",
"nodemailer": "^6.9.4",
"openai": "4.0.0-beta.7",
"openpipe": "workspace:*",
"pg": "^8.11.2",
"pluralize": "^8.0.0",
"posthog-js": "^1.75.3",
@@ -113,6 +119,7 @@
"@types/json-schema": "^7.0.12",
"@types/lodash-es": "^4.17.8",
"@types/node": "^18.16.0",
"@types/nodemailer": "^6.4.9",
"@types/pg": "^8.10.2",
"@types/pluralize": "^0.0.30",
"@types/prismjs": "^1.26.0",
@@ -128,6 +135,7 @@
"eslint-plugin-unused-imports": "^2.0.0",
"monaco-editor": "^0.40.0",
"openapi-typescript": "^6.3.4",
"openapi-typescript-codegen": "^0.25.0",
"prisma": "^4.14.0",
"raw-loader": "^4.0.2",
"typescript": "^5.0.4",

View File

@@ -0,0 +1 @@
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

View File

@@ -0,0 +1,66 @@
/*
Warnings:
- You are about to rename the column `completionTokens` to `outputTokens` on the `ModelResponse` table.
- You are about to rename the column `promptTokens` to `inputTokens` on the `ModelResponse` table.
- You are about to rename the column `startTime` on the `LoggedCall` table to `requestedAt`. Ensure compatibility with application logic.
- You are about to rename the column `startTime` on the `LoggedCallModelResponse` table to `requestedAt`. Ensure compatibility with application logic.
- You are about to rename the column `endTime` on the `LoggedCallModelResponse` table to `receivedAt`. Ensure compatibility with application logic.
- You are about to rename the column `error` on the `LoggedCallModelResponse` table to `errorMessage`. Ensure compatibility with application logic.
- You are about to rename the column `respStatus` on the `LoggedCallModelResponse` table to `statusCode`. Ensure compatibility with application logic.
- You are about to rename the column `totalCost` on the `LoggedCallModelResponse` table to `cost`. Ensure compatibility with application logic.
- You are about to rename the column `inputHash` on the `ModelResponse` table to `cacheKey`. Ensure compatibility with application logic.
- You are about to rename the column `output` on the `ModelResponse` table to `respPayload`. Ensure compatibility with application logic.
*/
-- DropIndex
DROP INDEX "LoggedCall_startTime_idx";
-- DropIndex
DROP INDEX "ModelResponse_inputHash_idx";
-- Rename completionTokens to outputTokens
ALTER TABLE "ModelResponse"
RENAME COLUMN "completionTokens" TO "outputTokens";
-- Rename promptTokens to inputTokens
ALTER TABLE "ModelResponse"
RENAME COLUMN "promptTokens" TO "inputTokens";
-- AlterTable
ALTER TABLE "LoggedCall"
RENAME COLUMN "startTime" TO "requestedAt";
-- AlterTable
ALTER TABLE "LoggedCallModelResponse"
RENAME COLUMN "startTime" TO "requestedAt";
-- AlterTable
ALTER TABLE "LoggedCallModelResponse"
RENAME COLUMN "endTime" TO "receivedAt";
-- AlterTable
ALTER TABLE "LoggedCallModelResponse"
RENAME COLUMN "error" TO "errorMessage";
-- AlterTable
ALTER TABLE "LoggedCallModelResponse"
RENAME COLUMN "respStatus" TO "statusCode";
-- AlterTable
ALTER TABLE "LoggedCallModelResponse"
RENAME COLUMN "totalCost" TO "cost";
-- AlterTable
ALTER TABLE "ModelResponse"
RENAME COLUMN "inputHash" TO "cacheKey";
-- AlterTable
ALTER TABLE "ModelResponse"
RENAME COLUMN "output" TO "respPayload";
-- CreateIndex
CREATE INDEX "LoggedCall_requestedAt_idx" ON "LoggedCall"("requestedAt");
-- CreateIndex
CREATE INDEX "ModelResponse_cacheKey_idx" ON "ModelResponse"("cacheKey");

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "LoggedCall" ADD COLUMN "model" TEXT;

View File

@@ -0,0 +1,22 @@
-- DropIndex
DROP INDEX "LoggedCallTag_name_idx";
DROP INDEX "LoggedCallTag_name_value_idx";
-- AlterTable: Add projectId column without NOT NULL constraint for now
ALTER TABLE "LoggedCallTag" ADD COLUMN "projectId" UUID;
-- Set the default value
UPDATE "LoggedCallTag" lct
SET "projectId" = lc."projectId"
FROM "LoggedCall" lc
WHERE lct."loggedCallId" = lc.id;
-- Now set the NOT NULL constraint
ALTER TABLE "LoggedCallTag" ALTER COLUMN "projectId" SET NOT NULL;
-- CreateIndex
CREATE INDEX "LoggedCallTag_projectId_name_idx" ON "LoggedCallTag"("projectId", "name");
CREATE INDEX "LoggedCallTag_projectId_name_value_idx" ON "LoggedCallTag"("projectId", "name", "value");
-- CreateIndex
CREATE UNIQUE INDEX "LoggedCallTag_loggedCallId_name_key" ON "LoggedCallTag"("loggedCallId", "name");

View File

@@ -0,0 +1,25 @@
-- CreateTable
CREATE TABLE "UserInvitation" (
"id" UUID NOT NULL,
"projectId" UUID NOT NULL,
"email" TEXT NOT NULL,
"role" "ProjectUserRole" NOT NULL,
"invitationToken" TEXT NOT NULL,
"senderId" UUID NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "UserInvitation_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "UserInvitation_invitationToken_key" ON "UserInvitation"("invitationToken");
-- CreateIndex
CREATE UNIQUE INDEX "UserInvitation_projectId_email_key" ON "UserInvitation"("projectId", "email");
-- AddForeignKey
ALTER TABLE "UserInvitation" ADD CONSTRAINT "UserInvitation_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "UserInvitation" ADD CONSTRAINT "UserInvitation_senderId_fkey" FOREIGN KEY ("senderId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -0,0 +1,88 @@
/*
* Copyright 2023 Viascom Ltd liab. Co
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION nanoid(
size int DEFAULT 21,
alphabet text DEFAULT '_-0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
)
RETURNS text
LANGUAGE plpgsql
volatile
AS
$$
DECLARE
idBuilder text := '';
counter int := 0;
bytes bytea;
alphabetIndex int;
alphabetArray text[];
alphabetLength int;
mask int;
step int;
BEGIN
alphabetArray := regexp_split_to_array(alphabet, '');
alphabetLength := array_length(alphabetArray, 1);
mask := (2 << cast(floor(log(alphabetLength - 1) / log(2)) as int)) - 1;
step := cast(ceil(1.6 * mask * size / alphabetLength) AS int);
while true
loop
bytes := gen_random_bytes(step);
while counter < step
loop
alphabetIndex := (get_byte(bytes, counter) & mask) + 1;
if alphabetIndex <= alphabetLength then
idBuilder := idBuilder || alphabetArray[alphabetIndex];
if length(idBuilder) = size then
return idBuilder;
end if;
end if;
counter := counter + 1;
end loop;
counter := 0;
end loop;
END
$$;
-- Make a short_nanoid function that uses the default alphabet and length of 15
CREATE OR REPLACE FUNCTION short_nanoid()
RETURNS text
LANGUAGE plpgsql
volatile
AS
$$
BEGIN
RETURN nanoid(15, '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ');
END
$$;
-- AlterTable
ALTER TABLE "Experiment" ADD COLUMN "slug" TEXT NOT NULL DEFAULT short_nanoid();
-- For existing experiments, keep the existing id as the slug for backwards compatibility
UPDATE "Experiment" SET "slug" = "id";
-- CreateIndex
CREATE UNIQUE INDEX "Experiment_slug_key" ON "Experiment"("slug");

View File

@@ -11,7 +11,9 @@ datasource db {
}
model Experiment {
id String @id @default(uuid()) @db.Uuid
id String @id @default(uuid()) @db.Uuid
slug String @unique @default(dbgenerated("short_nanoid()"))
label String
sortIndex Int @default(0)
@@ -112,17 +114,17 @@ model ScenarioVariantCell {
model ModelResponse {
id String @id @default(uuid()) @db.Uuid
inputHash String
requestedAt DateTime?
receivedAt DateTime?
output Json?
cost Float?
promptTokens Int?
completionTokens Int?
statusCode Int?
errorMessage String?
retryTime DateTime?
outdated Boolean @default(false)
cacheKey String
requestedAt DateTime?
receivedAt DateTime?
respPayload Json?
cost Float?
inputTokens Int?
outputTokens Int?
statusCode Int?
errorMessage String?
retryTime DateTime?
outdated Boolean @default(false)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -131,7 +133,7 @@ model ModelResponse {
scenarioVariantCell ScenarioVariantCell @relation(fields: [scenarioVariantCellId], references: [id], onDelete: Cascade)
outputEvaluations OutputEvaluation[]
@@index([inputHash])
@@index([cacheKey])
}
enum EvalType {
@@ -207,13 +209,14 @@ model Project {
personalProjectUserId String? @unique @db.Uuid
personalProjectUser User? @relation(fields: [personalProjectUserId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
projectUsers ProjectUser[]
experiments Experiment[]
datasets Dataset[]
loggedCalls LoggedCall[]
apiKeys ApiKey[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
projectUsers ProjectUser[]
projectUserInvitations UserInvitation[]
experiments Experiment[]
datasets Dataset[]
loggedCalls LoggedCall[]
apiKeys ApiKey[]
}
enum ProjectUserRole {
@@ -256,7 +259,7 @@ model WorldChampEntrant {
model LoggedCall {
id String @id @default(uuid()) @db.Uuid
startTime DateTime
requestedAt DateTime
// True if this call was served from the cache, false otherwise
cacheHit Boolean
@@ -273,12 +276,13 @@ model LoggedCall {
projectId String @db.Uuid
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
tags LoggedCallTag[]
model String?
tags LoggedCallTag[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@index([startTime])
@@index([requestedAt])
}
model LoggedCallModelResponse {
@@ -287,14 +291,14 @@ model LoggedCallModelResponse {
reqPayload Json
// The HTTP status returned by the model provider
respStatus Int?
statusCode Int?
respPayload Json?
// Should be null if the request was successful, and some string if the request failed.
error String?
errorMessage String?
startTime DateTime
endTime DateTime
requestedAt DateTime
receivedAt DateTime
// Note: the function to calculate the cacheKey should include the project
// ID so we don't share cached responses between projects, which could be an
@@ -308,7 +312,7 @@ model LoggedCallModelResponse {
outputTokens Int?
finishReason String?
completionId String?
totalCost Decimal? @db.Decimal(18, 12)
cost Decimal? @db.Decimal(18, 12)
// The LoggedCall that created this LoggedCallModelResponse
originalLoggedCallId String @unique @db.Uuid
@@ -322,15 +326,17 @@ model LoggedCallModelResponse {
}
model LoggedCallTag {
id String @id @default(uuid()) @db.Uuid
name String
value String?
id String @id @default(uuid()) @db.Uuid
name String
value String?
projectId String @db.Uuid
loggedCallId String @db.Uuid
loggedCall LoggedCall @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)
@@index([name])
@@index([name, value])
@@unique([loggedCallId, name])
@@index([projectId, name])
@@index([projectId, name, value])
}
model ApiKey {
@@ -339,8 +345,8 @@ model ApiKey {
name String
apiKey String @unique
projectId String @db.Uuid
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String @db.Uuid
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -387,16 +393,33 @@ model User {
role UserRole @default(USER)
accounts Account[]
sessions Session[]
projectUsers ProjectUser[]
projects Project[]
worldChampEntrant WorldChampEntrant?
accounts Account[]
sessions Session[]
projectUsers ProjectUser[]
projects Project[]
worldChampEntrant WorldChampEntrant?
sentUserInvitations UserInvitation[]
createdAt DateTime @default(now())
updatedAt DateTime @default(now()) @updatedAt
}
model UserInvitation {
id String @id @default(uuid()) @db.Uuid
projectId String @db.Uuid
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
email String
role ProjectUserRole
invitationToken String @unique
senderId String @db.Uuid
sender User @relation(fields: [senderId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([projectId, email])
}
model VerificationToken {
identifier String
token String @unique

View File

@@ -2,6 +2,7 @@ import { prisma } from "~/server/db";
import dedent from "dedent";
import { generateNewCell } from "~/server/utils/generateNewCell";
import { promptConstructorVersion } from "~/promptConstructor/version";
import { env } from "~/env.mjs";
const defaultId = "11111111-1111-1111-1111-111111111111";
@@ -9,6 +10,14 @@ await prisma.project.deleteMany({
where: { id: defaultId },
});
// Mark all users as admins
await prisma.user.updateMany({
where: {},
data: {
role: "ADMIN",
},
});
// If there's an existing project, just seed into it
const project =
(await prisma.project.findFirst({})) ??
@@ -16,6 +25,20 @@ const project =
data: { id: defaultId },
}));
if (env.OPENPIPE_API_KEY) {
await prisma.apiKey.upsert({
where: {
apiKey: env.OPENPIPE_API_KEY,
},
create: {
projectId: project.id,
name: "Default API Key",
apiKey: env.OPENPIPE_API_KEY,
},
update: {},
});
}
await prisma.experiment.deleteMany({
where: {
id: defaultId,

View File

@@ -13,6 +13,7 @@ const MODEL_RESPONSE_TEMPLATES: {
inputTokens: number;
outputTokens: number;
finishReason: string;
tags: { name: string; value: string }[];
}[] = [
{
reqPayload: {
@@ -107,6 +108,7 @@ const MODEL_RESPONSE_TEMPLATES: {
inputTokens: 236,
outputTokens: 5,
finishReason: "stop",
tags: [],
},
{
reqPayload: {
@@ -193,6 +195,7 @@ const MODEL_RESPONSE_TEMPLATES: {
inputTokens: 222,
outputTokens: 5,
finishReason: "stop",
tags: [],
},
{
reqPayload: {
@@ -231,6 +234,7 @@ const MODEL_RESPONSE_TEMPLATES: {
inputTokens: 14,
outputTokens: 7,
finishReason: "stop",
tags: [{ name: "prompt_id", value: "id2" }],
},
{
reqPayload: {
@@ -306,6 +310,10 @@ const MODEL_RESPONSE_TEMPLATES: {
inputTokens: 2802,
outputTokens: 108,
finishReason: "stop",
tags: [
{ name: "prompt_id", value: "chatcmpl-7lQS3MktOT8BTgNEytl9dkyssCQqL" },
{ name: "some_other_tag", value: "some_other_value" },
],
},
];
@@ -339,17 +347,18 @@ for (let i = 0; i < 1437; i++) {
MODEL_RESPONSE_TEMPLATES[Math.floor(Math.random() * MODEL_RESPONSE_TEMPLATES.length)]!;
const model = template.reqPayload.model;
// choose random time in the last two weeks, with a bias towards the last few days
const startTime = new Date(Date.now() - Math.pow(Math.random(), 2) * 1000 * 60 * 60 * 24 * 14);
const requestedAt = new Date(Date.now() - Math.pow(Math.random(), 2) * 1000 * 60 * 60 * 24 * 14);
// choose random delay anywhere from 2 to 10 seconds later for gpt-4, or 1 to 5 seconds for gpt-3.5
const delay =
model === "gpt-4" ? 1000 * 2 + Math.random() * 1000 * 8 : 1000 + Math.random() * 1000 * 4;
const endTime = new Date(startTime.getTime() + delay);
const receivedAt = new Date(requestedAt.getTime() + delay);
loggedCallsToCreate.push({
id: loggedCallId,
cacheHit: false,
startTime,
requestedAt,
projectId: project.id,
createdAt: startTime,
model: template.reqPayload.model,
createdAt: requestedAt,
});
const { promptTokenPrice, completionTokenPrice } =
@@ -365,21 +374,20 @@ for (let i = 0; i < 1437; i++) {
loggedCallModelResponsesToCreate.push({
id: loggedCallModelResponseId,
startTime,
endTime,
requestedAt,
receivedAt,
originalLoggedCallId: loggedCallId,
reqPayload: template.reqPayload,
respPayload: template.respPayload,
respStatus: template.respStatus,
error: template.error,
createdAt: startTime,
statusCode: template.respStatus,
errorMessage: template.error,
createdAt: requestedAt,
cacheKey: hashRequest(project.id, template.reqPayload as JsonValue),
durationMs: endTime.getTime() - startTime.getTime(),
durationMs: receivedAt.getTime() - requestedAt.getTime(),
inputTokens: template.inputTokens,
outputTokens: template.outputTokens,
finishReason: template.finishReason,
totalCost:
template.inputTokens * promptTokenPrice + template.outputTokens * completionTokenPrice,
cost: template.inputTokens * promptTokenPrice + template.outputTokens * completionTokenPrice,
});
loggedCallsToUpdate.push({
where: {
@@ -389,11 +397,14 @@ for (let i = 0; i < 1437; i++) {
modelResponseId: loggedCallModelResponseId,
},
});
loggedCallTagsToCreate.push({
loggedCallId,
name: "$model",
value: template.reqPayload.model,
});
for (const tag of template.tags) {
loggedCallTagsToCreate.push({
projectId: project.id,
loggedCallId,
name: tag.name,
value: tag.value,
});
}
}
await prisma.$transaction([

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 6.8 KiB

After

Width:  |  Height:  |  Size: 6.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 22 KiB

After

Width:  |  Height:  |  Size: 49 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 6.1 KiB

After

Width:  |  Height:  |  Size: 5.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 704 B

After

Width:  |  Height:  |  Size: 800 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.1 KiB

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.0 KiB

After

Width:  |  Height:  |  Size: 3.4 KiB

View File

@@ -9,10 +9,9 @@ Created by potrace 1.14, written by Peter Selinger 2001-2017
</metadata>
<g transform="translate(0.000000,550.000000) scale(0.100000,-0.100000)"
fill="#000000" stroke="none">
<path d="M813 5478 c-18 -13 -37 -36 -43 -52 -6 -19 -10 -236 -10 -603 0 -638
-1 -626 65 -657 25 -12 67 -16 179 -16 l146 0 0 -2032 0 -2032 23 -33 c12 -18
35 -37 51 -43 19 -7 539 -10 1528 -10 1663 0 1549 -5 1582 65 14 30 16 235 16
2059 l0 2026 156 0 156 0 39 39 39 39 0 587 c0 651 1 638 -65 669 -30 14 -223
16 -1932 16 l-1898 0 -32 -22z"/>
<path d="M785 5474 l-25 -27 0 -622 0 -622 25 -27 24 -26 171 0 170 0 0 -2050
0 -2051 25 -25 24 -24 1557 2 1556 3 19 24 c19 23 19 70 19 2072 l0 2049 169
0 c165 0 169 1 195 25 l26 24 0 626 0 626 -26 24 -27 25 -1939 0 -1939 0 -24
-26z"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 858 B

After

Width:  |  Height:  |  Size: 755 B

View File

@@ -1,5 +1,28 @@
<svg width="380" height="320" viewBox="0 0 380 320" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M72 320L122.5 231L130.5 150.5L115 73L72 0H312L265 64.5L257 158.5L265 249L312 320H72Z" fill="#FF5733"/>
<path d="M67.027 9.5C72.9909 9.5 79.5196 12.3449 86.3672 19.2588C93.2495 26.2075 99.8845 36.7468 105.66 50.5336C117.194 78.0671 124.554 116.764 124.554 160C124.554 203.236 117.194 241.933 105.66 269.466C99.8845 283.253 93.2495 293.793 86.3672 300.741C79.5196 307.655 72.9909 310.5 67.027 310.5C61.0632 310.5 54.5345 307.655 47.6868 300.741C40.8045 293.793 34.1695 283.253 28.394 269.466C16.8596 241.933 9.5 203.236 9.5 160C9.5 116.764 16.8596 78.0671 28.394 50.5336C34.1695 36.7468 40.8045 26.2075 47.6868 19.2588C54.5345 12.3449 61.0632 9.5 67.027 9.5Z" stroke="#FF5733" stroke-width="19"/>
<path d="M312.027 9.5C317.991 9.5 324.52 12.3449 331.367 19.2588C338.25 26.2075 344.885 36.7468 350.66 50.5336C362.194 78.0671 369.554 116.764 369.554 160C369.554 203.236 362.194 241.933 350.66 269.466C344.885 283.253 338.25 293.793 331.367 300.741C324.52 307.655 317.991 310.5 312.027 310.5C306.063 310.5 299.534 307.655 292.687 300.741C285.805 293.793 279.17 283.253 273.394 269.466C261.86 241.933 254.5 203.236 254.5 160C254.5 116.764 261.86 78.0671 273.394 50.5336C279.17 36.7468 285.805 26.2075 292.687 19.2588C299.534 12.3449 306.063 9.5 312.027 9.5Z" stroke="#FF5733" stroke-width="19"/>
<svg width="398" height="550" viewBox="0 0 398 550" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M39 125H359V542C359 546.418 355.418 550 351 550H47C42.5817 550 39 546.418 39 542V125Z" fill="black"/>
<path d="M0 8C0 3.58172 3.58172 0 8 0H390C394.418 0 398 3.58172 398 8V127C398 131.418 394.418 135 390 135H7.99999C3.58171 135 0 131.418 0 127V8Z" fill="black"/>
<path d="M50 135H348V535C348 537.209 346.209 539 344 539H54C51.7909 539 50 537.209 50 535V135Z" fill="#FF5733"/>
<path d="M11 14.0001C11 11.791 12.7909 10.0001 15 10.0001H384C386.209 10.0001 388 11.791 388 14.0001V120C388 122.209 386.209 124 384 124H15C12.7909 124 11 122.209 11 120V14.0001Z" fill="#FF5733"/>
<path d="M11 14.0001C11 11.791 12.7909 10.0001 15 10.0001H384C386.209 10.0001 388 11.791 388 14.0001V120C388 122.209 386.209 124 384 124H15C12.7909 124 11 122.209 11 120V14.0001Z" fill="url(#paint0_linear_102_49)"/>
<path d="M50 134H348V535C348 537.209 346.209 539 344 539H54C51.7909 539 50 537.209 50 535V134Z" fill="url(#paint1_linear_102_49)"/>
<path d="M108 142H156V535H108V142Z" fill="white"/>
<path d="M300 135H348V535C348 537.209 346.209 539 344 539H300V135Z" fill="white" fill-opacity="0.25"/>
<path d="M96 142H108V535H96V142Z" fill="white" fill-opacity="0.5"/>
<path d="M84 10.0001H133V120H84V10.0001Z" fill="white"/>
<path d="M339 10.0001H384C386.209 10.0001 388 11.791 388 14.0001V120C388 122.209 386.209 124 384 124H339V10.0001Z" fill="white" fill-opacity="0.25"/>
<path d="M71.9995 10.0001H83.9995V120H71.9995V10.0001Z" fill="white" fill-opacity="0.5"/>
<path d="M108 534.529H156V539.019H108V534.529Z" fill="#AAAAAA"/>
<path opacity="0.5" d="M95.9927 534.529H107.982V539.019H95.9927V534.529Z" fill="#AAAAAA"/>
<path d="M84.0029 119.887H133.007V124.027H84.0029V119.887Z" fill="#AAAAAA"/>
<path opacity="0.5" d="M71.9883 119.887H83.978V124.027H71.9883V119.887Z" fill="#AAAAAA"/>
<defs>
<linearGradient id="paint0_linear_102_49" x1="335" y1="67.0001" x2="137" y2="67.0001" gradientUnits="userSpaceOnUse">
<stop stop-color="#D62600"/>
<stop offset="1" stop-color="#FF5733" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint1_linear_102_49" x1="306.106" y1="336.5" x2="149.597" y2="336.5" gradientUnits="userSpaceOnUse">
<stop stop-color="#D62600"/>
<stop offset="1" stop-color="#FF5733" stop-opacity="0"/>
</linearGradient>
</defs>
</svg>

Before

Width:  |  Height:  |  Size: 1.4 KiB

After

Width:  |  Height:  |  Size: 2.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 62 KiB

After

Width:  |  Height:  |  Size: 26 KiB

View File

@@ -1,13 +1,13 @@
import { Textarea, type TextareaProps } from "@chakra-ui/react";
import ResizeTextarea from "react-textarea-autosize";
import React, { useLayoutEffect, useState } from "react";
import React, { useEffect, useState } from "react";
export const AutoResizeTextarea: React.ForwardRefRenderFunction<
HTMLTextAreaElement,
TextareaProps & { minRows?: number }
> = ({ minRows = 1, overflowY = "hidden", ...props }, ref) => {
const [isRerendered, setIsRerendered] = useState(false);
useLayoutEffect(() => setIsRerendered(true), []);
useEffect(() => setIsRerendered(true), []);
return (
<Textarea

View File

@@ -87,7 +87,7 @@ export const ModelStatsCard = ({
label="Price"
info={
<Text>
${model.pricePerSecond.toFixed(3)}
${model.pricePerSecond.toFixed(4)}
<Text color="gray.500"> / second</Text>
</Text>
}

View File

@@ -1,15 +1,16 @@
import { HStack, Icon, IconButton, Tooltip, Text } from "@chakra-ui/react";
import { HStack, Icon, IconButton, Tooltip, Text, type StackProps } from "@chakra-ui/react";
import { useState } from "react";
import { MdContentCopy } from "react-icons/md";
import { useHandledAsyncCallback } from "~/utils/hooks";
const CopiableCode = ({ code }: { code: string }) => {
const CopiableCode = ({ code, ...rest }: { code: string } & StackProps) => {
const [copied, setCopied] = useState(false);
const [copyToClipboard] = useHandledAsyncCallback(async () => {
await navigator.clipboard.writeText(code);
setCopied(true);
}, [code]);
return (
<HStack
backgroundColor="blackAlpha.800"
@@ -18,9 +19,19 @@ const CopiableCode = ({ code }: { code: string }) => {
padding={3}
w="full"
justifyContent="space-between"
alignItems="flex-start"
{...rest}
>
<Text fontFamily="inconsolata" fontWeight="bold" letterSpacing={0.5}>
<Text
fontFamily="inconsolata"
fontWeight="bold"
letterSpacing={0.5}
overflowX="auto"
whiteSpace="pre-wrap"
>
{code}
{/* Necessary for trailing newline to actually be displayed */}
{code.endsWith("\n") ? "\n" : ""}
</Text>
<Tooltip closeOnClick={false} label={copied ? "Copied!" : "Copy to clipboard"}>
<IconButton

View File

@@ -8,8 +8,8 @@ export default function Favicon() {
<link rel="icon" type="image/png" sizes="16x16" href="/favicons/favicon-16x16.png" />
<link rel="manifest" href="/favicons/site.webmanifest" />
<link rel="shortcut icon" href="/favicons/favicon.ico" />
<link rel="mask-icon" href="/favicons/safari-pinned-tab.svg" color="#5bbad5" />
<meta name="msapplication-TileColor" content="#da532c" />
<meta name="msapplication-config" content="/favicons/browserconfig.xml" />
<meta name="theme-color" content="#ffffff" />
</Head>
);

View File

@@ -0,0 +1,80 @@
import {
Input,
InputGroup,
InputRightElement,
Icon,
Popover,
PopoverTrigger,
PopoverContent,
VStack,
HStack,
Button,
Text,
useDisclosure,
} from "@chakra-ui/react";
import { FiChevronDown } from "react-icons/fi";
import { BiCheck } from "react-icons/bi";
type InputDropdownProps<T> = {
options: ReadonlyArray<T>;
selectedOption: T;
onSelect: (option: T) => void;
};
const InputDropdown = <T,>({ options, selectedOption, onSelect }: InputDropdownProps<T>) => {
const popover = useDisclosure();
return (
<Popover placement="bottom-start" {...popover}>
<PopoverTrigger>
<InputGroup cursor="pointer" w={(selectedOption as string).length * 14 + 180}>
<Input
value={selectedOption as string}
// eslint-disable-next-line @typescript-eslint/no-empty-function -- controlled input requires onChange
onChange={() => {}}
cursor="pointer"
borderColor={popover.isOpen ? "blue.500" : undefined}
_hover={popover.isOpen ? { borderColor: "blue.500" } : undefined}
contentEditable={false}
// disable focus
onFocus={(e) => {
e.target.blur();
}}
/>
<InputRightElement>
<Icon as={FiChevronDown} />
</InputRightElement>
</InputGroup>
</PopoverTrigger>
<PopoverContent boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);" minW={0} w="auto">
<VStack spacing={0}>
{options?.map((option, index) => (
<HStack
key={index}
as={Button}
onClick={() => {
onSelect(option);
popover.onClose();
}}
w="full"
variant="ghost"
justifyContent="space-between"
fontWeight="semibold"
borderRadius={0}
colorScheme="blue"
color="black"
fontSize="sm"
borderBottomWidth={1}
>
<Text mr={16}>{option as string}</Text>
{option === selectedOption && <Icon as={BiCheck} color="blue.500" boxSize={5} />}
</HStack>
))}
</VStack>
</PopoverContent>
</Popover>
);
};
export default InputDropdown;

View File

@@ -33,25 +33,11 @@ export default function AddVariantButton() {
<Flex w="100%" justifyContent="flex-end">
<ActionButton
onClick={onClick}
py={5}
py={7}
leftIcon={<Icon as={loading ? Spinner : BsPlus} boxSize={6} mr={loading ? 1 : 0} />}
>
<Text display={{ base: "none", md: "flex" }}>Add Variant</Text>
</ActionButton>
{/* <Button
alignItems="center"
justifyContent="center"
fontWeight="normal"
bgColor="transparent"
_hover={{ bgColor: "gray.100" }}
px={cellPadding.x}
onClick={onClick}
height="unset"
minH={headerMinHeight}
>
<Icon as={loading ? Spinner : BsPlus} boxSize={6} mr={loading ? 1 : 0} />
<Text display={{ base: "none", md: "flex" }}>Add Variant</Text>
</Button> */}
</Flex>
);
}

View File

@@ -12,6 +12,7 @@ import {
Select,
FormHelperText,
Code,
IconButton,
} from "@chakra-ui/react";
import { type Evaluation, EvalType } from "@prisma/client";
import { useCallback, useState } from "react";
@@ -183,46 +184,37 @@ export default function EditEvaluations() {
<Text flex={1}>
{evaluation.evalType}: &quot;{evaluation.value}&quot;
</Text>
<Button
<IconButton
aria-label="Edit"
variant="unstyled"
color="gray.400"
height="unset"
width="unset"
minW="unset"
color="gray.400"
onClick={() => setEditingId(evaluation.id)}
_hover={{
color: "gray.800",
cursor: "pointer",
}}
>
<Icon as={BsPencil} boxSize={4} />
</Button>
<Button
_hover={{ color: "gray.800", cursor: "pointer" }}
icon={<Icon as={BsPencil} />}
/>
<IconButton
aria-label="Delete"
variant="unstyled"
color="gray.400"
height="unset"
width="unset"
minW="unset"
color="gray.400"
onClick={() => onDelete(evaluation.id)}
_hover={{
color: "gray.800",
cursor: "pointer",
}}
>
<Icon as={BsX} boxSize={6} />
</Button>
_hover={{ color: "gray.800", cursor: "pointer" }}
icon={<Icon as={BsX} boxSize={6} />}
/>
</HStack>
),
)}
{editingId == null && (
<Button
onClick={() => setEditingId("new")}
alignSelf="flex-start"
alignSelf="end"
size="sm"
mt={4}
colorScheme="blue"
>
Add Evaluation
New Evaluation
</Button>
)}
{editingId == "new" && (

View File

@@ -1,103 +1,185 @@
import { Text, Button, HStack, Heading, Icon, Input, Stack } from "@chakra-ui/react";
import { useState } from "react";
import { BsCheck, BsX } from "react-icons/bs";
import { Text, Button, HStack, Heading, Icon, IconButton, Stack, VStack } from "@chakra-ui/react";
import { type TemplateVariable } from "@prisma/client";
import { useEffect, useState } from "react";
import { BsPencil, BsX } from "react-icons/bs";
import { api } from "~/utils/api";
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
import { useExperiment, useHandledAsyncCallback, useScenarioVars } from "~/utils/hooks";
import { maybeReportError } from "~/utils/errorHandling/maybeReportError";
import { FloatingLabelInput } from "./FloatingLabelInput";
export const ScenarioVar = ({
variable,
isEditing,
setIsEditing,
}: {
variable: Pick<TemplateVariable, "id" | "label">;
isEditing: boolean;
setIsEditing: (isEditing: boolean) => void;
}) => {
const utils = api.useContext();
const [label, setLabel] = useState(variable.label);
useEffect(() => {
setLabel(variable.label);
}, [variable.label]);
const renameVarMutation = api.scenarioVars.rename.useMutation();
const [onRename] = useHandledAsyncCallback(async () => {
const resp = await renameVarMutation.mutateAsync({ id: variable.id, label });
if (maybeReportError(resp)) return;
setIsEditing(false);
await utils.scenarioVars.list.invalidate();
await utils.scenarios.list.invalidate();
}, [label, variable.id]);
const deleteMutation = api.scenarioVars.delete.useMutation();
const [onDeleteVar] = useHandledAsyncCallback(async () => {
await deleteMutation.mutateAsync({ id: variable.id });
await utils.scenarioVars.list.invalidate();
}, [variable.id]);
if (isEditing) {
return (
<HStack w="full">
<FloatingLabelInput
flex={1}
label="Renamed Variable"
value={label}
onChange={(e) => setLabel(e.target.value)}
onKeyDown={(e) => {
if (e.key === "Enter") {
e.preventDefault();
onRename();
}
// If the user types a space, replace it with an underscore
if (e.key === " ") {
e.preventDefault();
setLabel((label) => label && `${label}_`);
}
}}
/>
<Button size="sm" onClick={() => setIsEditing(false)}>
Cancel
</Button>
<Button size="sm" colorScheme="blue" onClick={onRename}>
Save
</Button>
</HStack>
);
} else {
return (
<HStack w="full" borderTopWidth={1} borderColor="gray.200">
<Text flex={1}>{variable.label}</Text>
<IconButton
aria-label="Edit"
variant="unstyled"
minW="unset"
color="gray.400"
onClick={() => setIsEditing(true)}
_hover={{ color: "gray.800", cursor: "pointer" }}
icon={<Icon as={BsPencil} />}
/>
<IconButton
aria-label="Delete"
variant="unstyled"
minW="unset"
color="gray.400"
onClick={onDeleteVar}
_hover={{ color: "gray.800", cursor: "pointer" }}
icon={<Icon as={BsX} boxSize={6} />}
/>
</HStack>
);
}
};
export default function EditScenarioVars() {
const experiment = useExperiment();
const vars =
api.templateVars.list.useQuery({ experimentId: experiment.data?.id ?? "" }).data ?? [];
const vars = useScenarioVars();
const [currentlyEditingId, setCurrentlyEditingId] = useState<string | null>(null);
const [newVariable, setNewVariable] = useState<string>("");
const newVarIsValid = newVariable.length > 0 && !vars.map((v) => v.label).includes(newVariable);
const newVarIsValid = newVariable?.length ?? 0 > 0;
const utils = api.useContext();
const addVarMutation = api.templateVars.create.useMutation();
const addVarMutation = api.scenarioVars.create.useMutation();
const [onAddVar] = useHandledAsyncCallback(async () => {
if (!experiment.data?.id) return;
if (!newVarIsValid) return;
await addVarMutation.mutateAsync({
if (!newVariable) return;
const resp = await addVarMutation.mutateAsync({
experimentId: experiment.data.id,
label: newVariable,
});
await utils.templateVars.list.invalidate();
if (maybeReportError(resp)) return;
await utils.scenarioVars.list.invalidate();
setNewVariable("");
}, [addVarMutation, experiment.data?.id, newVarIsValid, newVariable]);
const deleteMutation = api.templateVars.delete.useMutation();
const [onDeleteVar] = useHandledAsyncCallback(async (id: string) => {
await deleteMutation.mutateAsync({ id });
await utils.templateVars.list.invalidate();
}, []);
return (
<Stack>
<Heading size="sm">Scenario Variables</Heading>
<Stack spacing={2}>
<VStack spacing={4}>
<Text fontSize="sm">
Scenario variables can be used in your prompt variants as well as evaluations.
</Text>
<HStack spacing={0}>
<Input
placeholder="Add Scenario Variable"
size="sm"
borderTopRadius={0}
borderRightRadius={0}
value={newVariable}
onChange={(e) => setNewVariable(e.target.value)}
onKeyDown={(e) => {
if (e.key === "Enter") {
e.preventDefault();
onAddVar();
}
// If the user types a space, replace it with an underscore
if (e.key === " ") {
e.preventDefault();
setNewVariable((v) => v + "_");
}
}}
/>
<Button
size="xs"
height="100%"
borderLeftRadius={0}
isDisabled={!newVarIsValid}
onClick={onAddVar}
>
<Icon as={BsCheck} boxSize={8} />
</Button>
</HStack>
<HStack spacing={2} py={4} wrap="wrap">
{vars.map((variable) => (
<HStack
<VStack spacing={0} w="full">
{vars.data?.map((variable) => (
<ScenarioVar
variable={variable}
key={variable.id}
spacing={0}
bgColor="blue.100"
color="blue.600"
pl={2}
pr={0}
fontWeight="bold"
>
<Text fontSize="sm" flex={1}>
{variable.label}
</Text>
<Button
size="xs"
variant="ghost"
colorScheme="blue"
p="unset"
minW="unset"
px="unset"
onClick={() => onDeleteVar(variable.id)}
>
<Icon as={BsX} boxSize={6} color="blue.800" />
</Button>
</HStack>
isEditing={currentlyEditingId === variable.id}
setIsEditing={(isEditing) => {
if (isEditing) {
setCurrentlyEditingId(variable.id);
} else {
setCurrentlyEditingId(null);
}
}}
/>
))}
</HStack>
</Stack>
</VStack>
{currentlyEditingId !== "new" && (
<Button
colorScheme="blue"
size="sm"
onClick={() => setCurrentlyEditingId("new")}
alignSelf="end"
>
New Variable
</Button>
)}
{currentlyEditingId === "new" && (
<HStack w="full">
<FloatingLabelInput
flex={1}
label="New Variable"
value={newVariable}
onChange={(e) => setNewVariable(e.target.value)}
onKeyDown={(e) => {
if (e.key === "Enter") {
e.preventDefault();
onAddVar();
}
// If the user types a space, replace it with an underscore
if (e.key === " ") {
e.preventDefault();
setNewVariable((v) => v && `${v}_`);
}
}}
/>
<Button size="sm" onClick={() => setCurrentlyEditingId(null)}>
Cancel
</Button>
<Button size="sm" colorScheme="blue" onClick={onAddVar}>
Save
</Button>
</HStack>
)}
</VStack>
</Stack>
);
}

View File

@@ -1,7 +1,7 @@
import { HStack, Icon, IconButton, Spinner, Tooltip, useDisclosure } from "@chakra-ui/react";
import { BsArrowClockwise, BsInfoCircle } from "react-icons/bs";
import { useExperimentAccess } from "~/utils/hooks";
import ExpandedModal from "./PromptModal";
import PromptModal from "./PromptModal";
import { type RouterOutputs } from "~/utils/api";
export const CellOptions = ({
@@ -32,7 +32,7 @@ export const CellOptions = ({
variant="ghost"
/>
</Tooltip>
<ExpandedModal cell={cell} disclosure={modalDisclosure} />
<PromptModal cell={cell} disclosure={modalDisclosure} />
</>
)}
{canModify && (

View File

@@ -0,0 +1,29 @@
import { type StackProps, VStack } from "@chakra-ui/react";
import { type RouterOutputs } from "~/utils/api";
import { type Scenario } from "../types";
import { CellOptions } from "./CellOptions";
import { OutputStats } from "./OutputStats";
const CellWrapper: React.FC<
StackProps & {
cell: RouterOutputs["scenarioVariantCells"]["get"] | undefined;
hardRefetching: boolean;
hardRefetch: () => void;
mostRecentResponse:
| NonNullable<RouterOutputs["scenarioVariantCells"]["get"]>["modelResponses"][0]
| undefined;
scenario: Scenario;
}
> = ({ children, cell, hardRefetching, hardRefetch, mostRecentResponse, scenario, ...props }) => (
<VStack w="full" alignItems="flex-start" {...props} px={2} py={2} h="100%">
{cell && (
<CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} cell={cell} />
)}
<VStack w="full" alignItems="flex-start" maxH={500} overflowY="auto" flex={1}>
{children}
</VStack>
{mostRecentResponse && <OutputStats modelResponse={mostRecentResponse} scenario={scenario} />}
</VStack>
);
export default CellWrapper;

View File

@@ -1,17 +1,16 @@
import { api } from "~/utils/api";
import { type PromptVariant, type Scenario } from "../types";
import { type StackProps, Text, VStack } from "@chakra-ui/react";
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
import { Text } from "@chakra-ui/react";
import stringify from "json-stringify-pretty-compact";
import { Fragment, useEffect, useState, type ReactElement } from "react";
import SyntaxHighlighter from "react-syntax-highlighter";
import { docco } from "react-syntax-highlighter/dist/cjs/styles/hljs";
import stringify from "json-stringify-pretty-compact";
import { type ReactElement, useState, useEffect, Fragment, useCallback } from "react";
import useSocket from "~/utils/useSocket";
import { OutputStats } from "./OutputStats";
import { RetryCountdown } from "./RetryCountdown";
import frontendModelProviders from "~/modelProviders/frontendModelProviders";
import { api } from "~/utils/api";
import { useHandledAsyncCallback, useScenarioVars } from "~/utils/hooks";
import useSocket from "~/utils/useSocket";
import { type PromptVariant, type Scenario } from "../types";
import CellWrapper from "./CellWrapper";
import { ResponseLog } from "./ResponseLog";
import { CellOptions } from "./TopActions";
import { RetryCountdown } from "./RetryCountdown";
const WAITING_MESSAGE_INTERVAL = 20000;
@@ -23,10 +22,7 @@ export default function OutputCell({
variant: PromptVariant;
}): ReactElement | null {
const utils = api.useContext();
const experiment = useExperiment();
const vars = api.templateVars.list.useQuery({
experimentId: experiment.data?.id ?? "",
}).data;
const vars = useScenarioVars().data;
const scenarioVariables = scenario.variableValues as Record<string, string>;
const templateHasVariables =
@@ -36,7 +32,7 @@ export default function OutputCell({
if (!templateHasVariables) disabledReason = "Add a value to the scenario variables to see output";
const [refetchInterval, setRefetchInterval] = useState(0);
const [refetchInterval, setRefetchInterval] = useState<number | false>(false);
const { data: cell, isLoading: queryLoading } = api.scenarioVariantCells.get.useQuery(
{ scenarioId: scenario.id, variantId: variant.id },
{ refetchInterval },
@@ -67,42 +63,34 @@ export default function OutputCell({
cell.retrievalStatus === "PENDING" ||
cell.retrievalStatus === "IN_PROGRESS" ||
hardRefetching;
useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : 0), [awaitingOutput]);
useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : false), [awaitingOutput]);
// TODO: disconnect from socket if we're not streaming anymore
const streamedMessage = useSocket<OutputSchema>(cell?.id);
const mostRecentResponse = cell?.modelResponses[cell.modelResponses.length - 1];
const CellWrapper = useCallback(
({ children, ...props }: StackProps) => (
<VStack w="full" alignItems="flex-start" {...props} px={2} py={2} h="100%">
{cell && (
<CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} cell={cell} />
)}
<VStack w="full" alignItems="flex-start" maxH={500} overflowY="auto" flex={1}>
{children}
</VStack>
{mostRecentResponse && (
<OutputStats modelResponse={mostRecentResponse} scenario={scenario} />
)}
</VStack>
),
[hardRefetching, hardRefetch, mostRecentResponse, scenario, cell],
);
const wrapperProps: Parameters<typeof CellWrapper>[0] = {
cell,
hardRefetching,
hardRefetch,
mostRecentResponse,
scenario,
};
if (!vars) return null;
if (!cell && !fetchingOutput)
return (
<CellWrapper>
<CellWrapper {...wrapperProps}>
<Text color="gray.500">Error retrieving output</Text>
</CellWrapper>
);
if (cell && cell.errorMessage) {
return (
<CellWrapper>
<CellWrapper {...wrapperProps}>
<Text color="red.500">{cell.errorMessage}</Text>
</CellWrapper>
);
@@ -110,11 +98,16 @@ export default function OutputCell({
if (disabledReason) return <Text color="gray.500">{disabledReason}</Text>;
const showLogs = !streamedMessage && !mostRecentResponse?.output;
const showLogs = !streamedMessage && !mostRecentResponse?.respPayload;
if (showLogs)
return (
<CellWrapper alignItems="flex-start" fontFamily="inconsolata, monospace" spacing={0}>
<CellWrapper
{...wrapperProps}
alignItems="flex-start"
fontFamily="inconsolata, monospace"
spacing={0}
>
{cell?.jobQueuedAt && <ResponseLog time={cell.jobQueuedAt} title="Job queued" />}
{cell?.jobStartedAt && <ResponseLog time={cell.jobStartedAt} title="Job started" />}
{cell?.modelResponses?.map((response) => {
@@ -123,8 +116,13 @@ export default function OutputCell({
? response.receivedAt.getTime()
: Date.now();
if (response.requestedAt) {
numWaitingMessages = Math.floor(
(relativeWaitingTime - response.requestedAt.getTime()) / WAITING_MESSAGE_INTERVAL,
numWaitingMessages = Math.min(
Math.floor(
(relativeWaitingTime - response.requestedAt.getTime()) / WAITING_MESSAGE_INTERVAL,
),
// Don't try to render more than 15, it'll use too much CPU and
// break the page
15,
);
}
return (
@@ -163,15 +161,15 @@ export default function OutputCell({
</CellWrapper>
);
const normalizedOutput = mostRecentResponse?.output
? provider.normalizeOutput(mostRecentResponse?.output)
const normalizedOutput = mostRecentResponse?.respPayload
? provider.normalizeOutput(mostRecentResponse?.respPayload)
: streamedMessage
? provider.normalizeOutput(streamedMessage)
: null;
if (mostRecentResponse?.output && normalizedOutput?.type === "json") {
if (mostRecentResponse?.respPayload && normalizedOutput?.type === "json") {
return (
<CellWrapper>
<CellWrapper {...wrapperProps}>
<SyntaxHighlighter
customStyle={{ overflowX: "unset", width: "100%", flex: 1 }}
language="json"
@@ -190,8 +188,8 @@ export default function OutputCell({
const contentToDisplay = (normalizedOutput?.type === "text" && normalizedOutput.value) || "";
return (
<CellWrapper>
<Text>{contentToDisplay}</Text>
<CellWrapper {...wrapperProps}>
<Text whiteSpace="pre-wrap">{contentToDisplay}</Text>
</CellWrapper>
);
}

View File

@@ -19,8 +19,8 @@ export const OutputStats = ({
? modelResponse.receivedAt.getTime() - modelResponse.requestedAt.getTime()
: 0;
const promptTokens = modelResponse.promptTokens;
const completionTokens = modelResponse.completionTokens;
const inputTokens = modelResponse.inputTokens;
const outputTokens = modelResponse.outputTokens;
return (
<HStack
@@ -55,8 +55,8 @@ export const OutputStats = ({
</HStack>
{modelResponse.cost && (
<CostTooltip
promptTokens={promptTokens}
completionTokens={completionTokens}
inputTokens={inputTokens}
outputTokens={outputTokens}
cost={modelResponse.cost}
>
<HStack spacing={0}>

View File

@@ -5,30 +5,103 @@ import {
ModalContent,
ModalHeader,
ModalOverlay,
VStack,
Text,
Box,
type UseDisclosureReturn,
Link,
} from "@chakra-ui/react";
import { type RouterOutputs } from "~/utils/api";
import { api, type RouterOutputs } from "~/utils/api";
import { JSONTree } from "react-json-tree";
import CopiableCode from "~/components/CopiableCode";
export default function ExpandedModal(props: {
const theme = {
scheme: "chalk",
author: "chris kempson (http://chriskempson.com)",
base00: "transparent",
base01: "#202020",
base02: "#303030",
base03: "#505050",
base04: "#b0b0b0",
base05: "#d0d0d0",
base06: "#e0e0e0",
base07: "#f5f5f5",
base08: "#fb9fb1",
base09: "#eda987",
base0A: "#ddb26f",
base0B: "#acc267",
base0C: "#12cfc0",
base0D: "#6fc2ef",
base0E: "#e1a3ee",
base0F: "#deaf8f",
};
export default function PromptModal(props: {
cell: NonNullable<RouterOutputs["scenarioVariantCells"]["get"]>;
disclosure: UseDisclosureReturn;
}) {
const { data } = api.scenarioVariantCells.getTemplatedPromptMessage.useQuery(
{
cellId: props.cell.id,
},
{
enabled: props.disclosure.isOpen,
},
);
return (
<Modal isOpen={props.disclosure.isOpen} onClose={props.disclosure.onClose} size="2xl">
<Modal isOpen={props.disclosure.isOpen} onClose={props.disclosure.onClose} size="xl">
<ModalOverlay />
<ModalContent>
<ModalHeader>Prompt</ModalHeader>
<ModalHeader>Prompt Details</ModalHeader>
<ModalCloseButton />
<ModalBody>
<JSONTree
data={props.cell.prompt}
invertTheme={true}
theme="chalk"
shouldExpandNodeInitially={() => true}
getItemString={() => ""}
hideRoot
/>
<VStack py={4} w="">
<VStack w="full" alignItems="flex-start">
<Text fontWeight="bold">Full Prompt</Text>
<Box
w="full"
p={4}
alignItems="flex-start"
backgroundColor="blackAlpha.800"
borderRadius={4}
>
<JSONTree
data={props.cell.prompt}
theme={theme}
shouldExpandNodeInitially={() => true}
getItemString={() => ""}
hideRoot
/>
</Box>
</VStack>
{data?.templatedPrompt && (
<VStack w="full" mt={4} alignItems="flex-start">
<Text fontWeight="bold">Templated prompt message:</Text>
<CopiableCode
w="full"
// bgColor="gray.100"
p={4}
borderWidth={1}
whiteSpace="pre-wrap"
code={data.templatedPrompt}
/>
</VStack>
)}
{data?.learnMoreUrl && (
<Link
href={data.learnMoreUrl}
isExternal
color="blue.500"
fontWeight="bold"
fontSize="sm"
mt={4}
alignSelf="flex-end"
>
Learn More
</Link>
)}
</VStack>
</ModalBody>
</ModalContent>
</Modal>

View File

@@ -1,7 +1,7 @@
import { isEqual } from "lodash-es";
import { useEffect, useState, type DragEvent } from "react";
import { api } from "~/utils/api";
import { useExperiment, useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
import { useExperimentAccess, useHandledAsyncCallback, useScenarioVars } from "~/utils/hooks";
import { type Scenario } from "./types";
import {
@@ -41,8 +41,7 @@ export default function ScenarioEditor({
if (savedValues) setValues(savedValues);
}, [savedValues]);
const experiment = useExperiment();
const vars = api.templateVars.list.useQuery({ experimentId: experiment.data?.id ?? "" });
const vars = useScenarioVars();
const variableLabels = vars.data?.map((v) => v.label) ?? [];
@@ -112,25 +111,23 @@ export default function ScenarioEditor({
onDrop={onReorder}
backgroundColor={isDragTarget ? "gray.100" : "transparent"}
>
{variableLabels.length === 0 ? (
<Box color="gray.500">
{vars.data ? "No scenario variables configured" : "Loading..."}
</Box>
) : (
{
<VStack spacing={4} flex={1} py={2}>
<HStack justifyContent="space-between" w="100%" align="center" spacing={0}>
<Text flex={1}>Scenario</Text>
<Tooltip label="Expand" hasArrow>
<IconButton
aria-label="Expand"
icon={<Icon as={BsArrowsAngleExpand} boxSize={3} />}
onClick={() => setScenarioEditorModalOpen(true)}
size="xs"
colorScheme="gray"
color="gray.500"
variant="ghost"
/>
</Tooltip>
{variableLabels.length && (
<Tooltip label="Expand" hasArrow>
<IconButton
aria-label="Expand"
icon={<Icon as={BsArrowsAngleExpand} boxSize={3} />}
onClick={() => setScenarioEditorModalOpen(true)}
size="xs"
colorScheme="gray"
color="gray.500"
variant="ghost"
/>
</Tooltip>
)}
{canModify && props.canHide && (
<Tooltip label="Delete" hasArrow>
<IconButton
@@ -151,31 +148,38 @@ export default function ScenarioEditor({
</Tooltip>
)}
</HStack>
{variableLabels.map((key) => {
const value = values[key] ?? "";
return (
<FloatingLabelInput
key={key}
label={key}
isDisabled={!canModify}
style={{ width: "100%" }}
maxHeight={32}
value={value}
onChange={(e) => {
setValues((prev) => ({ ...prev, [key]: e.target.value }));
}}
onKeyDown={(e) => {
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
e.preventDefault();
e.currentTarget.blur();
onSave();
}
}}
onMouseEnter={() => setVariableInputHovered(true)}
onMouseLeave={() => setVariableInputHovered(false)}
/>
);
})}
{variableLabels.length === 0 ? (
<Box color="gray.500">
{vars.data ? "No scenario variables configured" : "Loading..."}
</Box>
) : (
variableLabels.map((key) => {
const value = values[key] ?? "";
return (
<FloatingLabelInput
key={key}
label={key}
isDisabled={!canModify}
style={{ width: "100%" }}
maxHeight={32}
value={value}
onChange={(e) => {
setValues((prev) => ({ ...prev, [key]: e.target.value }));
}}
onKeyDown={(e) => {
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
e.preventDefault();
e.currentTarget.blur();
onSave();
}
}}
onMouseEnter={() => setVariableInputHovered(true)}
onMouseLeave={() => setVariableInputHovered(false)}
/>
);
})
)}
{hasChanged && (
<HStack justify="right">
<Button
@@ -193,7 +197,7 @@ export default function ScenarioEditor({
</HStack>
)}
</VStack>
)}
}
</HStack>
{scenarioEditorModalOpen && (
<ScenarioEditorModal

View File

@@ -58,18 +58,18 @@ export const ScenarioEditorModal = ({
await utils.scenarios.list.invalidate();
}, [mutation, values]);
const vars = api.templateVars.list.useQuery({ experimentId: experiment.data?.id ?? "" });
const vars = api.scenarioVars.list.useQuery({ experimentId: experiment.data?.id ?? "" });
const variableLabels = vars.data?.map((v) => v.label) ?? [];
return (
<Modal
isOpen
onClose={onClose}
size={{ base: "xl", sm: "2xl", md: "3xl", lg: "5xl", xl: "7xl" }}
size={{ base: "xl", sm: "2xl", md: "3xl", lg: "4xl", xl: "5xl" }}
>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader />
<ModalHeader>Edit Scenario</ModalHeader>
<ModalCloseButton />
<ModalBody maxW="unset">
<VStack spacing={8}>

View File

@@ -1,21 +1,16 @@
import { type StackProps } from "@chakra-ui/react";
import { useScenarios } from "~/utils/hooks";
import Paginator from "../Paginator";
const ScenarioPaginator = () => {
const ScenarioPaginator = (props: StackProps) => {
const { data } = useScenarios();
if (!data) return null;
const { scenarios, startIndex, lastPage, count } = data;
const { count } = data;
return (
<Paginator
numItemsLoaded={scenarios.length}
startIndex={startIndex}
lastPage={lastPage}
count={count}
/>
);
return <Paginator count={count} condense {...props} />;
};
export default ScenarioPaginator;

View File

@@ -10,6 +10,8 @@ const ScenarioRow = (props: {
variants: PromptVariant[];
canHide: boolean;
rowStart: number;
isFirst: boolean;
isLast: boolean;
}) => {
const [isHovered, setIsHovered] = useState(false);
@@ -21,10 +23,14 @@ const ScenarioRow = (props: {
onMouseEnter={() => setIsHovered(true)}
onMouseLeave={() => setIsHovered(false)}
sx={isHovered ? highlightStyle : undefined}
borderLeftWidth={1}
{...borders}
bgColor="white"
rowStart={props.rowStart}
colStart={1}
borderLeftWidth={1}
borderTopWidth={props.isFirst ? 1 : 0}
borderTopLeftRadius={props.isFirst ? 8 : 0}
borderBottomLeftRadius={props.isLast ? 8 : 0}
{...borders}
>
<ScenarioEditor scenario={props.scenario} hovered={isHovered} canHide={props.canHide} />
</GridItem>
@@ -34,8 +40,12 @@ const ScenarioRow = (props: {
onMouseEnter={() => setIsHovered(true)}
onMouseLeave={() => setIsHovered(false)}
sx={isHovered ? highlightStyle : undefined}
bgColor="white"
rowStart={props.rowStart}
colStart={i + 2}
borderTopWidth={props.isFirst ? 1 : 0}
borderTopRightRadius={props.isFirst && i === props.variants.length - 1 ? 8 : 0}
borderBottomRightRadius={props.isLast && i === props.variants.length - 1 ? 8 : 0}
{...borders}
>
<OutputCell key={variant.id} scenario={props.scenario} variant={variant} />

View File

@@ -48,7 +48,7 @@ export const ScenariosHeader = () => {
);
return (
<HStack w="100%" pb={cellPadding.y} pt={0} align="center" spacing={0}>
<HStack w="100%" py={cellPadding.y} px={cellPadding.x} align="center" spacing={0}>
<Text fontSize={16} fontWeight="bold">
Scenarios ({scenarios.data?.count})
</Text>
@@ -57,11 +57,16 @@ export const ScenariosHeader = () => {
<MenuButton
as={IconButton}
mt={1}
ml={2}
variant="ghost"
aria-label="Edit Scenarios"
icon={<Icon as={loading ? Spinner : BsGear} />}
maxW={8}
minW={8}
minH={8}
maxH={8}
/>
<MenuList fontSize="md" zIndex="dropdown" mt={-3}>
<MenuList fontSize="md" zIndex="dropdown" mt={-1}>
<MenuItem
icon={<Icon as={BsPlus} boxSize={6} mx="-5px" />}
onClick={() => onAddScenario(false)}
@@ -72,7 +77,7 @@ export const ScenariosHeader = () => {
Autogenerate Scenario
</MenuItem>
<MenuItem icon={<BsPencil />} onClick={openDrawer}>
Edit Vars
Add or Remove Variables
</MenuItem>
</MenuList>
</Menu>

View File

@@ -17,18 +17,22 @@ export default function VariantStats(props: { variant: PromptVariant }) {
initialData: {
evalResults: [],
overallCost: 0,
promptTokens: 0,
completionTokens: 0,
inputTokens: 0,
outputTokens: 0,
scenarioCount: 0,
outputCount: 0,
awaitingCompletions: false,
awaitingEvals: false,
},
refetchInterval,
},
);
// Poll every two seconds while we are waiting for LLM retrievals to finish
useEffect(() => setRefetchInterval(data.awaitingEvals ? 5000 : 0), [data.awaitingEvals]);
// Poll every five seconds while we are waiting for LLM retrievals to finish
useEffect(
() => setRefetchInterval(data.awaitingCompletions || data.awaitingEvals ? 5000 : 0),
[data.awaitingCompletions, data.awaitingEvals],
);
const [passColor, neutralColor, failColor] = useToken("colors", [
"green.500",
@@ -68,8 +72,8 @@ export default function VariantStats(props: { variant: PromptVariant }) {
</HStack>
{data.overallCost && (
<CostTooltip
promptTokens={data.promptTokens}
completionTokens={data.completionTokens}
inputTokens={data.inputTokens}
outputTokens={data.outputTokens}
cost={data.overallCost}
>
<HStack spacing={0} align="center" color="gray.500">

View File

@@ -53,20 +53,29 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
colStart: i + 2,
borderLeftWidth: i === 0 ? 1 : 0,
marginLeft: i === 0 ? "-1px" : 0,
backgroundColor: "gray.100",
backgroundColor: "white",
};
const isFirst = i === 0;
const isLast = i === variants.data.length - 1;
return (
<Fragment key={variant.uiId}>
<VariantHeader
variant={variant}
canHide={variants.data.length > 1}
rowStart={1}
borderTopLeftRadius={isFirst ? 8 : 0}
borderTopRightRadius={isLast ? 8 : 0}
{...sharedProps}
/>
<GridItem rowStart={2} {...sharedProps}>
<VariantEditor variant={variant} />
</GridItem>
<GridItem rowStart={3} {...sharedProps}>
<GridItem
rowStart={3}
{...sharedProps}
borderBottomLeftRadius={isFirst ? 8 : 0}
borderBottomRightRadius={isLast ? 8 : 0}
>
<VariantStats variant={variant} />
</GridItem>
</Fragment>
@@ -77,7 +86,6 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
colSpan={allCols - 1}
rowStart={variantHeaderRows + 1}
colStart={1}
{...borders}
borderRightWidth={0}
>
<ScenariosHeader />
@@ -90,6 +98,8 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
scenario={scenario}
variants={variants.data}
canHide={visibleScenariosCount > 1}
isFirst={i === 0}
isLast={i === visibleScenariosCount - 1}
/>
))}
<GridItem

View File

@@ -1,77 +1,119 @@
import { Box, HStack, IconButton } from "@chakra-ui/react";
import {
BsChevronDoubleLeft,
BsChevronDoubleRight,
BsChevronLeft,
BsChevronRight,
} from "react-icons/bs";
import { usePage } from "~/utils/hooks";
import { HStack, IconButton, Text, Select, type StackProps, Icon } from "@chakra-ui/react";
import React, { useCallback } from "react";
import { FiChevronsLeft, FiChevronsRight, FiChevronLeft, FiChevronRight } from "react-icons/fi";
import { usePageParams } from "~/utils/hooks";
const pageSizeOptions = [10, 25, 50, 100];
const Paginator = ({
numItemsLoaded,
startIndex,
lastPage,
count,
}: {
numItemsLoaded: number;
startIndex: number;
lastPage: number;
count: number;
}) => {
const [page, setPage] = usePage();
condense,
...props
}: { count: number; condense?: boolean } & StackProps) => {
const { page, pageSize, setPageParams } = usePageParams();
const lastPage = Math.ceil(count / pageSize);
const updatePageSize = useCallback(
(newPageSize: number) => {
const newPage = Math.floor(((page - 1) * pageSize) / newPageSize) + 1;
setPageParams({ page: newPage, pageSize: newPageSize }, "replace");
},
[page, pageSize, setPageParams],
);
const nextPage = () => {
if (page < lastPage) {
setPage(page + 1, "replace");
setPageParams({ page: page + 1 }, "replace");
}
};
const prevPage = () => {
if (page > 1) {
setPage(page - 1, "replace");
setPageParams({ page: page - 1 }, "replace");
}
};
const goToLastPage = () => setPage(lastPage, "replace");
const goToFirstPage = () => setPage(1, "replace");
const goToLastPage = () => setPageParams({ page: lastPage }, "replace");
const goToFirstPage = () => setPageParams({ page: 1 }, "replace");
if (count === 0) return null;
return (
<HStack pt={4}>
<IconButton
variant="ghost"
size="sm"
onClick={goToFirstPage}
isDisabled={page === 1}
aria-label="Go to first page"
icon={<BsChevronDoubleLeft />}
/>
<IconButton
variant="ghost"
size="sm"
onClick={prevPage}
isDisabled={page === 1}
aria-label="Previous page"
icon={<BsChevronLeft />}
/>
<Box>
{startIndex}-{startIndex + numItemsLoaded - 1} / {count}
</Box>
<IconButton
variant="ghost"
size="sm"
onClick={nextPage}
isDisabled={page === lastPage}
aria-label="Next page"
icon={<BsChevronRight />}
/>
<IconButton
variant="ghost"
size="sm"
onClick={goToLastPage}
isDisabled={page === lastPage}
aria-label="Go to last page"
icon={<BsChevronDoubleRight />}
/>
<HStack
pt={4}
spacing={8}
justifyContent={condense ? "flex-start" : "space-between"}
alignItems="center"
w="full"
{...props}
>
{!condense && (
<>
<HStack>
<Text>Rows</Text>
<Select
value={pageSize}
onChange={(e) => updatePageSize(parseInt(e.target.value))}
w={20}
backgroundColor="white"
>
{pageSizeOptions.map((option) => (
<option key={option} value={option}>
{option}
</option>
))}
</Select>
</HStack>
<Text>
Page {page} of {lastPage}
</Text>
</>
)}
<HStack>
<IconButton
variant="outline"
size="sm"
onClick={goToFirstPage}
isDisabled={page === 1}
aria-label="Go to first page"
icon={<Icon as={FiChevronsLeft} boxSize={5} strokeWidth={1.5} />}
bgColor="white"
/>
<IconButton
variant="outline"
size="sm"
onClick={prevPage}
isDisabled={page === 1}
aria-label="Previous page"
icon={<Icon as={FiChevronLeft} boxSize={5} strokeWidth={1.5} />}
bgColor="white"
/>
{condense && (
<Text>
Page {page} of {lastPage}
</Text>
)}
<IconButton
variant="outline"
size="sm"
onClick={nextPage}
isDisabled={page === lastPage}
aria-label="Next page"
icon={<Icon as={FiChevronRight} boxSize={5} strokeWidth={1.5} />}
bgColor="white"
/>
<IconButton
variant="outline"
size="sm"
onClick={goToLastPage}
isDisabled={page === lastPage}
aria-label="Go to last page"
icon={<Icon as={FiChevronsRight} boxSize={5} strokeWidth={1.5} />}
bgColor="white"
/>
</HStack>
</HStack>
);
};

View File

@@ -75,7 +75,7 @@ export default function VariantHeader(
padding={0}
sx={{
position: "sticky",
top: "0",
top: "-2",
// Ensure that the menu always appears above the sticky header of other variants
zIndex: menuOpen ? "dropdown" : 10,
}}
@@ -84,6 +84,7 @@ export default function VariantHeader(
>
<HStack
spacing={2}
py={2}
alignItems="flex-start"
minH={headerMinHeight}
draggable={!isInputHovered}
@@ -102,7 +103,9 @@ export default function VariantHeader(
setIsDragTarget(false);
}}
onDrop={onReorder}
backgroundColor={isDragTarget ? "gray.200" : "gray.100"}
backgroundColor={isDragTarget ? "gray.200" : "white"}
borderTopLeftRadius={gridItemProps.borderTopLeftRadius}
borderTopRightRadius={gridItemProps.borderTopRightRadius}
h="full"
>
<Icon

View File

@@ -1,201 +0,0 @@
import {
Box,
Card,
CardHeader,
Heading,
Table,
Tbody,
Td,
Th,
Thead,
Tr,
Tooltip,
Collapse,
HStack,
VStack,
IconButton,
useToast,
Icon,
Button,
ButtonGroup,
} from "@chakra-ui/react";
import dayjs from "dayjs";
import relativeTime from "dayjs/plugin/relativeTime";
import { ChevronUpIcon, ChevronDownIcon, CopyIcon } from "lucide-react";
import { useMemo, useState } from "react";
import { type RouterOutputs, api } from "~/utils/api";
import SyntaxHighlighter from "react-syntax-highlighter";
import { atelierCaveLight } from "react-syntax-highlighter/dist/cjs/styles/hljs";
import stringify from "json-stringify-pretty-compact";
import Link from "next/link";
dayjs.extend(relativeTime);
type LoggedCall = RouterOutputs["dashboard"]["loggedCalls"][0];
const FormattedJson = ({ json }: { json: any }) => {
const jsonString = stringify(json, { maxLength: 40 });
const toast = useToast();
const copyToClipboard = async (text: string) => {
try {
await navigator.clipboard.writeText(text);
toast({
title: "Copied to clipboard",
status: "success",
duration: 2000,
});
} catch (err) {
toast({
title: "Failed to copy to clipboard",
status: "error",
duration: 2000,
});
}
};
return (
<Box position="relative" fontSize="sm" borderRadius="md" overflow="hidden">
<SyntaxHighlighter
customStyle={{ overflowX: "unset" }}
language="json"
style={atelierCaveLight}
lineProps={{
style: { wordBreak: "break-all", whiteSpace: "pre-wrap" },
}}
wrapLines
>
{jsonString}
</SyntaxHighlighter>
<IconButton
aria-label="Copy"
icon={<CopyIcon />}
position="absolute"
top={1}
right={1}
size="xs"
variant="ghost"
onClick={() => void copyToClipboard(jsonString)}
/>
</Box>
);
};
function TableRow({
loggedCall,
isExpanded,
onToggle,
}: {
loggedCall: LoggedCall;
isExpanded: boolean;
onToggle: () => void;
}) {
const isError = loggedCall.modelResponse?.respStatus !== 200;
const timeAgo = dayjs(loggedCall.startTime).fromNow();
const fullTime = dayjs(loggedCall.startTime).toString();
const model = useMemo(
() => loggedCall.tags.find((tag) => tag.name.startsWith("$model"))?.value,
[loggedCall.tags],
);
return (
<>
<Tr
onClick={onToggle}
key={loggedCall.id}
_hover={{ bgColor: "gray.100", cursor: "pointer" }}
sx={{
"> td": { borderBottom: "none" },
}}
>
<Td>
<Icon boxSize={6} as={isExpanded ? ChevronUpIcon : ChevronDownIcon} />
</Td>
<Td>
<Tooltip label={fullTime} placement="top">
<Box whiteSpace="nowrap" minW="120px">
{timeAgo}
</Box>
</Tooltip>
</Td>
<Td width="100%">{model}</Td>
<Td isNumeric>{((loggedCall.modelResponse?.durationMs ?? 0) / 1000).toFixed(2)}s</Td>
<Td isNumeric>{loggedCall.modelResponse?.inputTokens}</Td>
<Td isNumeric>{loggedCall.modelResponse?.outputTokens}</Td>
<Td sx={{ color: isError ? "red.500" : "green.500", fontWeight: "semibold" }} isNumeric>
{loggedCall.modelResponse?.respStatus ?? "No response"}
</Td>
</Tr>
<Tr>
<Td colSpan={8} p={0}>
<Collapse in={isExpanded} unmountOnExit={true}>
<VStack p={4} align="stretch">
<HStack align="stretch">
<VStack flex={1} align="stretch">
<Heading size="sm">Input</Heading>
<FormattedJson json={loggedCall.modelResponse?.reqPayload} />
</VStack>
<VStack flex={1} align="stretch">
<Heading size="sm">Output</Heading>
<FormattedJson json={loggedCall.modelResponse?.respPayload} />
</VStack>
</HStack>
<ButtonGroup alignSelf="flex-end">
<Button as={Link} colorScheme="blue" href={{ pathname: "/experiments" }}>
Experiments
</Button>
</ButtonGroup>
</VStack>
</Collapse>
</Td>
</Tr>
</>
);
}
export default function LoggedCallTable() {
const [expandedRow, setExpandedRow] = useState<string | null>(null);
const loggedCalls = api.dashboard.loggedCalls.useQuery({});
return (
<Card variant="outline" width="100%" overflow="hidden">
<CardHeader>
<Heading as="h3" size="sm">
Logged Calls
</Heading>
</CardHeader>
<Table>
<Thead>
<Tr>
<Th />
<Th>Time</Th>
<Th>Model</Th>
<Th isNumeric>Duration</Th>
<Th isNumeric>Input tokens</Th>
<Th isNumeric>Output tokens</Th>
<Th isNumeric>Status</Th>
</Tr>
</Thead>
<Tbody>
{loggedCalls.data?.map((loggedCall) => {
return (
<TableRow
key={loggedCall.id}
loggedCall={loggedCall}
isExpanded={loggedCall.id === expandedRow}
onToggle={() => {
if (loggedCall.id === expandedRow) {
setExpandedRow(null);
} else {
setExpandedRow(loggedCall.id);
}
}}
/>
);
})}
</Tbody>
</Table>
</Card>
);
}

View File

@@ -0,0 +1,46 @@
import { Card, CardHeader, Heading, Table, Tbody, HStack, Button, Text } from "@chakra-ui/react";
import { useState } from "react";
import Link from "next/link";
import { useLoggedCalls } from "~/utils/hooks";
import { TableHeader, TableRow } from "../requestLogs/TableRow";
export default function LoggedCallsTable() {
const [expandedRow, setExpandedRow] = useState<string | null>(null);
const { data: loggedCalls } = useLoggedCalls();
return (
<Card width="100%" overflow="hidden">
<CardHeader>
<HStack justifyContent="space-between">
<Heading as="h3" size="sm">
Request Logs
</Heading>
<Button as={Link} href="/request-logs" variant="ghost" colorScheme="blue">
<Text>View All</Text>
</Button>
</HStack>
</CardHeader>
<Table>
<TableHeader />
<Tbody>
{loggedCalls?.calls.map((loggedCall) => {
return (
<TableRow
key={loggedCall.id}
loggedCall={loggedCall}
isExpanded={loggedCall.id === expandedRow}
onToggle={() => {
if (loggedCall.id === expandedRow) {
setExpandedRow(null);
} else {
setExpandedRow(loggedCall.id);
}
}}
/>
);
})}
</Tbody>
</Table>
</Card>
);
}

View File

@@ -0,0 +1,61 @@
import {
ResponsiveContainer,
LineChart,
Line,
XAxis,
YAxis,
CartesianGrid,
Tooltip,
Legend,
} from "recharts";
import { useMemo } from "react";
import { useSelectedProject } from "~/utils/hooks";
import dayjs from "~/utils/dayjs";
import { api } from "~/utils/api";
export default function UsageGraph() {
const { data: selectedProject } = useSelectedProject();
const stats = api.dashboard.stats.useQuery(
{ projectId: selectedProject?.id ?? "" },
{ enabled: !!selectedProject },
);
const data = useMemo(() => {
return (
stats.data?.periods.map(({ period, numQueries, cost }) => ({
period,
Requests: numQueries,
"Total Spent (USD)": parseFloat(cost.toString()),
})) || []
);
}, [stats.data]);
return (
<ResponsiveContainer width="100%" height={400}>
<LineChart data={data} margin={{ top: 5, right: 20, left: 10, bottom: 5 }}>
<XAxis dataKey="period" tickFormatter={(str: string) => dayjs(str).format("MMM D")} />
<YAxis yAxisId="left" dataKey="Requests" orientation="left" stroke="#8884d8" />
<YAxis
yAxisId="right"
dataKey="Total Spent (USD)"
orientation="right"
unit="$"
stroke="#82ca9d"
/>
<Tooltip />
<Legend />
<CartesianGrid stroke="#f5f5f5" />
<Line dataKey="Requests" stroke="#8884d8" yAxisId="left" dot={false} strokeWidth={2} />
<Line
dataKey="Total Spent (USD)"
stroke="#82ca9d"
yAxisId="right"
dot={false}
strokeWidth={2}
/>
</LineChart>
</ResponsiveContainer>
);
}

View File

@@ -1,21 +1,16 @@
import { type StackProps } from "@chakra-ui/react";
import { useDatasetEntries } from "~/utils/hooks";
import Paginator from "../Paginator";
const DatasetEntriesPaginator = () => {
const DatasetEntriesPaginator = (props: StackProps) => {
const { data } = useDatasetEntries();
if (!data) return null;
const { entries, startIndex, lastPage, count } = data;
const { count } = data;
return (
<Paginator
numItemsLoaded={entries.length}
startIndex={startIndex}
lastPage={lastPage}
count={count}
/>
);
return <Paginator count={count} {...props} />;
};
export default DatasetEntriesPaginator;

View File

@@ -7,39 +7,35 @@ import {
Spinner,
AspectRatio,
SkeletonText,
Card,
} from "@chakra-ui/react";
import { RiFlaskLine } from "react-icons/ri";
import { formatTimePast } from "~/utils/dayjs";
import Link from "next/link";
import { useRouter } from "next/router";
import { BsPlusSquare } from "react-icons/bs";
import { api } from "~/utils/api";
import { RouterOutputs, api } from "~/utils/api";
import { useHandledAsyncCallback } from "~/utils/hooks";
import { useAppStore } from "~/state/store";
type ExperimentData = {
testScenarioCount: number;
promptVariantCount: number;
id: string;
label: string;
sortIndex: number;
createdAt: Date;
updatedAt: Date;
};
export const ExperimentCard = ({ exp }: { exp: ExperimentData }) => {
export const ExperimentCard = ({ exp }: { exp: RouterOutputs["experiments"]["list"][0] }) => {
return (
<AspectRatio ratio={1.2} w="full">
<Card
w="full"
h="full"
cursor="pointer"
p={4}
bg="white"
borderRadius={4}
_hover={{ bg: "gray.100" }}
transition="background 0.2s"
aspectRatio={1.2}
>
<VStack
as={Link}
href={{ pathname: "/experiments/[id]", query: { id: exp.id } }}
bg="gray.50"
_hover={{ bg: "gray.100" }}
transition="background 0.2s"
cursor="pointer"
borderColor="gray.200"
borderWidth={1}
p={4}
w="full"
h="full"
href={{ pathname: "/experiments/[experimentSlug]", query: { experimentSlug: exp.slug } }}
justify="space-between"
>
<HStack w="full" color="gray.700" justify="center">
@@ -57,7 +53,7 @@ export const ExperimentCard = ({ exp }: { exp: ExperimentData }) => {
<Text flex={1}>Updated {formatTimePast(exp.updatedAt)}</Text>
</HStack>
</VStack>
</AspectRatio>
</Card>
);
};
@@ -83,36 +79,36 @@ export const NewExperimentCard = () => {
projectId: selectedProjectId ?? "",
});
await router.push({
pathname: "/experiments/[id]",
query: { id: newExperiment.id },
pathname: "/experiments/[experimentSlug]",
query: { experimentSlug: newExperiment.slug },
});
}, [createMutation, router, selectedProjectId]);
return (
<AspectRatio ratio={1.2} w="full">
<VStack
align="center"
justify="center"
_hover={{ cursor: "pointer", bg: "gray.50" }}
transition="background 0.2s"
cursor="pointer"
borderColor="gray.200"
borderWidth={1}
p={4}
onClick={createExperiment}
>
<Card
w="full"
h="full"
cursor="pointer"
p={4}
bg="white"
borderRadius={4}
_hover={{ bg: "gray.100" }}
transition="background 0.2s"
aspectRatio={1.2}
>
<VStack align="center" justify="center" w="full" h="full" p={4} onClick={createExperiment}>
<Icon as={isLoading ? Spinner : BsPlusSquare} boxSize={8} />
<Text display={{ base: "none", md: "block" }} ml={2}>
New Experiment
</Text>
</VStack>
</AspectRatio>
</Card>
);
};
export const ExperimentCardSkeleton = () => (
<AspectRatio ratio={1.2} w="full">
<VStack align="center" borderColor="gray.200" borderWidth={1} p={4} bg="gray.50">
<VStack align="center" borderColor="gray.200" borderWidth={1} p={4} bg="white">
<SkeletonText noOfLines={1} w="80%" />
<SkeletonText noOfLines={2} w="60%" />
<SkeletonText noOfLines={1} w="80%" />

View File

@@ -16,11 +16,14 @@ export const useOnForkButtonPressed = () => {
const [onFork, isForking] = useHandledAsyncCallback(async () => {
if (!experiment.data?.id || !selectedProjectId) return;
const forkedExperimentId = await forkMutation.mutateAsync({
const newExperiment = await forkMutation.mutateAsync({
id: experiment.data.id,
projectId: selectedProjectId,
});
await router.push({ pathname: "/experiments/[id]", query: { id: forkedExperimentId } });
await router.push({
pathname: "/experiments/[experimentSlug]",
query: { experimentSlug: newExperiment.slug },
});
}, [forkMutation, experiment.data?.id, router]);
const onForkButtonPressed = useCallback(() => {

View File

@@ -1,4 +1,4 @@
import { useState, useEffect } from "react";
import { useState, useEffect, useRef } from "react";
import {
Heading,
VStack,
@@ -9,14 +9,14 @@ import {
Box,
Link as ChakraLink,
Flex,
useBreakpointValue,
} from "@chakra-ui/react";
import Head from "next/head";
import Link from "next/link";
import { BsGearFill, BsGithub, BsPersonCircle } from "react-icons/bs";
import { IoStatsChartOutline } from "react-icons/io5";
import { RiDatabase2Line, RiFlaskLine } from "react-icons/ri";
import { RiHome3Line, RiDatabase2Line, RiFlaskLine } from "react-icons/ri";
import { signIn, useSession } from "next-auth/react";
import UserMenu from "./UserMenu";
import { env } from "~/env.mjs";
import ProjectMenu from "./ProjectMenu";
import NavSidebarOption from "./NavSidebarOption";
@@ -27,10 +27,16 @@ const Divider = () => <Box h="1px" bgColor="gray.300" w="full" />;
const NavSidebar = () => {
const user = useSession().data;
// Hack to get around initial flash, see https://github.com/chakra-ui/chakra-ui/issues/6452
const isMobile = useBreakpointValue({ base: true, md: false, ssr: false });
const renderCount = useRef(0);
renderCount.current++;
const displayLogo = isMobile && renderCount.current > 1;
return (
<VStack
align="stretch"
bgColor="gray.50"
py={2}
px={2}
pb={0}
@@ -40,25 +46,59 @@ const NavSidebar = () => {
borderRightWidth={1}
borderColor="gray.300"
>
<HStack as={Link} href="/" _hover={{ textDecoration: "none" }} spacing={0} px={2} py={2}>
<Image src="/logo.svg" alt="" boxSize={6} mr={4} />
<Heading size="md" fontFamily="inconsolata, monospace">
OpenPipe
</Heading>
</HStack>
<Divider />
{displayLogo && (
<>
<HStack
as={Link}
href="/"
_hover={{ textDecoration: "none" }}
spacing={{ base: 1, md: 0 }}
mx={2}
py={{ base: 1, md: 2 }}
>
<Image src="/logo.svg" alt="" boxSize={6} mr={4} ml={{ base: 0.5, md: 0 }} />
<Heading size="md" fontFamily="inconsolata, monospace">
OpenPipe
</Heading>
</HStack>
<Divider />
</>
)}
<VStack align="flex-start" overflowY="auto" overflowX="hidden" flex={1}>
{user != null && (
<>
<ProjectMenu />
<Divider />
{env.NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS && (
<IconLink icon={IoStatsChartOutline} label="Logged Calls" href="/logged-calls" beta />
<>
<IconLink icon={RiHome3Line} label="Dashboard" href="/dashboard" beta />
<IconLink
icon={IoStatsChartOutline}
label="Request Logs"
href="/request-logs"
beta
/>
</>
)}
<IconLink icon={RiFlaskLine} label="Experiments" href="/experiments" />
{env.NEXT_PUBLIC_SHOW_DATA && (
<IconLink icon={RiDatabase2Line} label="Data" href="/data" />
)}
<VStack w="full" alignItems="flex-start" spacing={0} pt={8}>
<Text
pl={2}
pb={2}
fontSize="xs"
fontWeight="bold"
color="gray.500"
display={{ base: "none", md: "flex" }}
>
CONFIGURATION
</Text>
<IconLink icon={BsGearFill} label="Project Settings" href="/project/settings" />
</VStack>
</>
)}
{user === null && (
@@ -80,20 +120,7 @@ const NavSidebar = () => {
</NavSidebarOption>
)}
</VStack>
<VStack w="full" alignItems="flex-start" spacing={0}>
<Text
pl={2}
pb={2}
fontSize="xs"
fontWeight="bold"
color="gray.500"
display={{ base: "none", md: "flex" }}
>
CONFIGURATION
</Text>
<IconLink icon={BsGearFill} label="Project Settings" href="/project/settings" />
</VStack>
{user && <UserMenu user={user} borderColor={"gray.200"} />}
<Divider />
<VStack spacing={0} align="center">
<ChakraLink
@@ -153,7 +180,7 @@ export default function AppShell({
<title>{title ? `${title} | OpenPipe` : "OpenPipe"}</title>
</Head>
<NavSidebar />
<Box h="100%" flex={1} overflowY="auto">
<Box h="100%" flex={1} overflowY="auto" bgColor="gray.50">
{children}
</Box>
</Flex>

View File

@@ -1,11 +1,10 @@
import { Box, type BoxProps } from "@chakra-ui/react";
import { Box, type BoxProps, forwardRef } from "@chakra-ui/react";
import { useRouter } from "next/router";
const NavSidebarOption = ({
activeHrefPattern,
disableHoverEffect,
...props
}: { activeHrefPattern?: string; disableHoverEffect?: boolean } & BoxProps) => {
const NavSidebarOption = forwardRef<
{ activeHrefPattern?: string; disableHoverEffect?: boolean } & BoxProps,
"div"
>(({ activeHrefPattern, disableHoverEffect, ...props }, ref) => {
const router = useRouter();
const isActive = activeHrefPattern && router.pathname.startsWith(activeHrefPattern);
return (
@@ -18,10 +17,13 @@ const NavSidebarOption = ({
cursor="pointer"
borderRadius={4}
{...props}
ref={ref}
>
{props.children}
</Box>
);
};
});
NavSidebarOption.displayName = "NavSidebarOption";
export default NavSidebarOption;

View File

@@ -6,17 +6,18 @@ import {
PopoverTrigger,
PopoverContent,
Flex,
IconButton,
Icon,
Divider,
Button,
useDisclosure,
Spinner,
Link as ChakraLink,
Image,
Box,
} from "@chakra-ui/react";
import React, { useEffect, useState } from "react";
import { useEffect } from "react";
import Link from "next/link";
import { AiFillCaretDown } from "react-icons/ai";
import { BsGear, BsPlus } from "react-icons/bs";
import { BsPlus, BsPersonCircle } from "react-icons/bs";
import { type Project } from "@prisma/client";
import { useAppStore } from "~/state/store";
@@ -24,13 +25,14 @@ import { api } from "~/utils/api";
import NavSidebarOption from "./NavSidebarOption";
import { useHandledAsyncCallback, useSelectedProject } from "~/utils/hooks";
import { useRouter } from "next/router";
import { useSession, signOut } from "next-auth/react";
export default function ProjectMenu() {
const router = useRouter();
const utils = api.useContext();
const selectedProjectId = useAppStore((s) => s.selectedProjectId);
const setselectedProjectId = useAppStore((s) => s.setselectedProjectId);
const setSelectedProjectId = useAppStore((s) => s.setSelectedProjectId);
const { data: projects } = api.projects.list.useQuery();
@@ -40,9 +42,9 @@ export default function ProjectMenu() {
projects[0] &&
(!selectedProjectId || !projects.find((proj) => proj.id === selectedProjectId))
) {
setselectedProjectId(projects[0].id);
setSelectedProjectId(projects[0].id);
}
}, [selectedProjectId, setselectedProjectId, projects]);
}, [selectedProjectId, setSelectedProjectId, projects]);
const { data: selectedProject } = useSelectedProject();
@@ -50,33 +52,32 @@ export default function ProjectMenu() {
const createMutation = api.projects.create.useMutation();
const [createProject, isLoading] = useHandledAsyncCallback(async () => {
const newProj = await createMutation.mutateAsync({ name: "New Project" });
const newProj = await createMutation.mutateAsync({ name: "Untitled Project" });
await utils.projects.list.invalidate();
setselectedProjectId(newProj.id);
setSelectedProjectId(newProj.id);
await router.push({ pathname: "/project/settings" });
}, [createMutation, router]);
const user = useSession().data;
const profileImage = user?.user.image ? (
<Image src={user.user.image} alt="profile picture" boxSize={6} borderRadius="50%" />
) : (
<Icon as={BsPersonCircle} boxSize={6} />
);
return (
<VStack w="full" alignItems="flex-start" spacing={0}>
<Text
pl={2}
pb={2}
fontSize="xs"
fontWeight="bold"
color="gray.500"
display={{ base: "none", md: "flex" }}
<VStack w="full" alignItems="flex-start" spacing={0} py={1}>
<Popover
placement="bottom"
isOpen={popover.isOpen}
onOpen={popover.onOpen}
onClose={popover.onClose}
closeOnBlur
>
PROJECT
</Text>
<NavSidebarOption>
<Popover
placement="bottom-start"
isOpen={popover.isOpen}
onClose={popover.onClose}
closeOnBlur
>
<PopoverTrigger>
<HStack w="full" onClick={popover.onToggle}>
<PopoverTrigger>
<NavSidebarOption>
<HStack w="full">
<Flex
p={1}
borderRadius={4}
@@ -89,49 +90,78 @@ export default function ProjectMenu() {
>
<Text>{selectedProject?.name[0]?.toUpperCase()}</Text>
</Flex>
<Text fontSize="sm" display={{ base: "none", md: "block" }} py={1} flex={1}>
<Text
fontSize="sm"
display={{ base: "none", md: "block" }}
py={1}
flex={1}
fontWeight="bold"
>
{selectedProject?.name}
</Text>
<Icon as={AiFillCaretDown} boxSize={3} size="xs" color="gray.500" mr={2} />
<Box mr={2}>{profileImage}</Box>
</HStack>
</PopoverTrigger>
<PopoverContent
_focusVisible={{ boxShadow: "unset" }}
minW={0}
borderColor="blue.400"
w="full"
>
<VStack alignItems="flex-start" spacing={2} py={4} px={2}>
<Text color="gray.500" fontSize="xs" fontWeight="bold" pb={1}>
PROJECTS
</Text>
<Divider />
<VStack spacing={0} w="full">
{projects?.map((proj) => (
<ProjectOption
key={proj.id}
proj={proj}
isActive={proj.id === selectedProjectId}
onClose={popover.onClose}
/>
))}
</VStack>
</NavSidebarOption>
</PopoverTrigger>
<PopoverContent
_focusVisible={{ outline: "unset" }}
w={220}
ml={{ base: 2, md: 0 }}
boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);"
fontSize="sm"
>
<VStack alignItems="flex-start" spacing={1} py={1}>
<Text px={3} py={2}>
{user?.user.email}
</Text>
<Divider />
<Text alignSelf="flex-start" fontWeight="bold" px={3} pt={2}>
Your Projects
</Text>
<VStack spacing={0} w="full" px={1}>
{projects?.map((proj) => (
<ProjectOption
key={proj.id}
proj={proj}
isActive={proj.id === selectedProjectId}
onClose={popover.onClose}
/>
))}
<HStack
as={Button}
variant="ghost"
colorScheme="blue"
color="blue.400"
pr={8}
w="full"
fontSize="sm"
justifyContent="flex-start"
onClick={createProject}
w="full"
borderRadius={4}
spacing={0}
>
<Icon as={isLoading ? Spinner : BsPlus} boxSize={6} />
<Text>New project</Text>
<Text>Add project</Text>
<Icon as={isLoading ? Spinner : BsPlus} boxSize={4} strokeWidth={0.5} />
</HStack>
</VStack>
</PopoverContent>
</Popover>
</NavSidebarOption>
<Divider />
<VStack w="full" px={1}>
<ChakraLink
onClick={() => {
signOut().catch(console.error);
}}
_hover={{ bgColor: "gray.200", textDecoration: "none" }}
w="full"
py={2}
px={2}
borderRadius={4}
>
<Text>Sign out</Text>
</ChakraLink>
</VStack>
</VStack>
</PopoverContent>
</Popover>
</VStack>
);
}
@@ -145,36 +175,26 @@ const ProjectOption = ({
isActive: boolean;
onClose: () => void;
}) => {
const setselectedProjectId = useAppStore((s) => s.setselectedProjectId);
const [gearHovered, setGearHovered] = useState(false);
const setSelectedProjectId = useAppStore((s) => s.setSelectedProjectId);
return (
<HStack
as={Link}
href="/experiments"
onClick={() => {
setselectedProjectId(proj.id);
setSelectedProjectId(proj.id);
onClose();
}}
w="full"
justifyContent="space-between"
bgColor={isActive ? "gray.100" : "transparent"}
_hover={gearHovered ? undefined : { bgColor: "gray.200", textDecoration: "none" }}
p={2}
_hover={{ bgColor: "gray.200", textDecoration: "none" }}
bgColor={isActive ? "gray.100" : undefined}
py={2}
px={4}
borderRadius={4}
spacing={4}
>
<Text>{proj.name}</Text>
<IconButton
as={Link}
href="/project/settings"
aria-label={`Open ${proj.name} settings`}
icon={<Icon as={BsGear} boxSize={5} strokeWidth={0.5} color="gray.500" />}
variant="ghost"
size="xs"
p={0}
onMouseEnter={() => setGearHovered(true)}
onMouseLeave={() => setGearHovered(false)}
_hover={{ bgColor: isActive ? "gray.300" : "gray.100", transitionDelay: 0 }}
borderRadius={4}
/>
</HStack>
);
};

View File

@@ -9,7 +9,6 @@ import {
PopoverContent,
Link,
type StackProps,
Box,
} from "@chakra-ui/react";
import { type Session } from "next-auth";
import { signOut } from "next-auth/react";
@@ -27,30 +26,28 @@ export default function UserMenu({ user, ...rest }: { user: Session } & StackPro
<>
<Popover placement="right">
<PopoverTrigger>
<Box>
<NavSidebarOption>
<HStack
// Weird values to make mobile look right; can clean up when we make the sidebar disappear on mobile
py={2}
px={1}
spacing={3}
{...rest}
>
{profileImage}
<VStack spacing={0} align="start" flex={1} flexShrink={1}>
<Text fontWeight="bold" fontSize="sm">
{user.user.name}
</Text>
<Text color="gray.500" fontSize="xs">
{/* {user.user.email} */}
</Text>
</VStack>
<Icon as={BsChevronRight} boxSize={4} color="gray.500" />
</HStack>
</NavSidebarOption>
</Box>
<NavSidebarOption>
<HStack
// Weird values to make mobile look right; can clean up when we make the sidebar disappear on mobile
py={2}
px={1}
spacing={3}
{...rest}
>
{profileImage}
<VStack spacing={0} align="start" flex={1} flexShrink={1}>
<Text fontWeight="bold" fontSize="sm">
{user.user.name}
</Text>
<Text color="gray.500" fontSize="xs">
{/* {user.user.email} */}
</Text>
</VStack>
<Icon as={BsChevronRight} boxSize={4} color="gray.500" />
</HStack>
</NavSidebarOption>
</PopoverTrigger>
<PopoverContent _focusVisible={{ boxShadow: "unset", outline: "unset" }} maxW="200px">
<PopoverContent _focusVisible={{ outline: "unset" }} ml={-1} minW={48} w="full">
<VStack align="stretch" spacing={0}>
{/* sign out */}
<HStack

View File

@@ -0,0 +1,128 @@
import {
Button,
FormControl,
FormLabel,
Input,
FormHelperText,
HStack,
Modal,
ModalBody,
ModalCloseButton,
ModalContent,
ModalFooter,
ModalHeader,
ModalOverlay,
Spinner,
Text,
VStack,
RadioGroup,
Radio,
} from "@chakra-ui/react";
import { useState, useEffect } from "react";
import { api } from "~/utils/api";
import { useHandledAsyncCallback, useSelectedProject } from "~/utils/hooks";
import { maybeReportError } from "~/utils/errorHandling/maybeReportError";
import { type ProjectUserRole } from "@prisma/client";
/**
 * Modal for inviting a new member (by email) to the currently-selected
 * project, with a choice of MEMBER or ADMIN role.
 */
export const InviteMemberModal = ({
  isOpen,
  onClose,
}: {
  isOpen: boolean;
  onClose: () => void;
}) => {
  const selectedProject = useSelectedProject().data;
  const utils = api.useContext();

  const [email, setEmail] = useState("");
  const [role, setRole] = useState<ProjectUserRole>("MEMBER");

  // Reset the form every time the modal toggles so a previous invitation's
  // input doesn't carry over.
  useEffect(() => {
    setEmail("");
    setRole("MEMBER");
  }, [isOpen]);

  // True when the email is empty or not shaped like an address.
  // (Renamed from `emailIsValid`, whose meaning was inverted.)
  const emailIsInvalid = !email || !/.+@.+\..+/.test(email);

  const inviteMemberMutation = api.users.inviteToProject.useMutation();
  const [inviteMember, isInviting] = useHandledAsyncCallback(async () => {
    if (!selectedProject?.id || !role) return;
    const resp = await inviteMemberMutation.mutateAsync({
      projectId: selectedProject.id,
      email,
      role,
    });
    // Server-side errors are surfaced to the user; keep the modal open.
    if (maybeReportError(resp)) return;
    await utils.projects.get.invalidate();
    onClose();
  }, [inviteMemberMutation, email, role, selectedProject?.id, onClose]);

  return (
    <Modal isOpen={isOpen} onClose={onClose}>
      <ModalOverlay />
      <ModalContent w={1200}>
        <ModalHeader>
          <HStack>
            <Text>Invite Member</Text>
          </HStack>
        </ModalHeader>
        <ModalCloseButton />
        <ModalBody>
          <VStack spacing={8} alignItems="flex-start">
            <Text>
              Invite a new member to <b>{selectedProject?.name}</b>.
            </Text>
            <RadioGroup
              value={role}
              onChange={(e) => setRole(e as ProjectUserRole)}
              colorScheme="orange"
            >
              <VStack w="full" alignItems="flex-start">
                <Radio value="MEMBER">
                  <Text fontSize="sm">MEMBER</Text>
                </Radio>
                <Radio value="ADMIN">
                  <Text fontSize="sm">ADMIN</Text>
                </Radio>
              </VStack>
            </RadioGroup>
            <FormControl>
              <FormLabel>Email</FormLabel>
              <Input
                type="email"
                value={email}
                onChange={(e) => setEmail(e.target.value)}
                onKeyDown={(e) => {
                  // NOTE(review): submission requires a modifier key held with
                  // Enter — confirm plain Enter shouldn't submit as well.
                  if (e.key === "Enter" && (e.metaKey || e.ctrlKey || e.shiftKey)) {
                    e.preventDefault();
                    e.currentTarget.blur();
                    inviteMember();
                  }
                }}
              />
              <FormHelperText>Enter the email of the person you want to invite.</FormHelperText>
            </FormControl>
          </VStack>
        </ModalBody>
        <ModalFooter mt={4}>
          <HStack>
            <Button colorScheme="gray" onClick={onClose} minW={24}>
              <Text>Cancel</Text>
            </Button>
            <Button
              colorScheme="orange"
              onClick={inviteMember}
              minW={24}
              isDisabled={emailIsInvalid || isInviting}
            >
              {isInviting ? <Spinner boxSize={4} /> : <Text>Send Invitation</Text>}
            </Button>
          </HStack>
        </ModalFooter>
      </ModalContent>
    </Modal>
  );
};

View File

@@ -0,0 +1,145 @@
import { useMemo, useState } from "react";
import {
Table,
Thead,
Tr,
Th,
Tbody,
Td,
IconButton,
useDisclosure,
Text,
Button,
} from "@chakra-ui/react";
import { useSession } from "next-auth/react";
import { BsTrash } from "react-icons/bs";
import { type User } from "@prisma/client";
import { useHandledAsyncCallback, useSelectedProject } from "~/utils/hooks";
import { InviteMemberModal } from "./InviteMemberModal";
import { RemoveMemberDialog } from "./RemoveMemberDialog";
import { api } from "~/utils/api";
import { maybeReportError } from "~/utils/errorHandling/maybeReportError";
/**
 * Table of the selected project's members and pending invitations.
 * Admins additionally see remove-member and cancel-invitation controls.
 */
const MemberTable = () => {
  const selectedProject = useSelectedProject().data;
  const session = useSession().data;
  const utils = api.useContext();

  // Member currently pending removal; non-null while the confirm dialog is open.
  const [memberToRemove, setMemberToRemove] = useState<User | null>(null);

  // NOTE(review): nothing in this component calls inviteMemberModal.onOpen —
  // confirm the trigger lives elsewhere or is missing.
  const inviteMemberModal = useDisclosure();

  const cancelInvitationMutation = api.users.cancelProjectInvitation.useMutation();
  const [cancelInvitation, isCancelling] = useHandledAsyncCallback(
    async (invitationToken: string) => {
      if (!selectedProject?.id) return;
      const resp = await cancelInvitationMutation.mutateAsync({
        invitationToken,
      });
      if (maybeReportError(resp)) return;
      await utils.projects.get.invalidate();
    },
    [selectedProject?.id, cancelInvitationMutation],
  );

  const sortedMembers = useMemo(() => {
    if (!selectedProject?.projectUsers) return [];
    // Copy before sorting: Array.prototype.sort mutates in place, and this
    // array belongs to the cached query result.
    return [...selectedProject.projectUsers].sort((a, b) => {
      if (a.role === b.role) return a.createdAt < b.createdAt ? -1 : 1;
      // Take advantage of fact that ADMIN is alphabetically before MEMBER
      return a.role < b.role ? -1 : 1;
    });
  }, [selectedProject?.projectUsers]);

  return (
    <>
      <Table fontSize={{ base: "sm", md: "md" }}>
        <Thead
          sx={{
            th: {
              base: { px: 0 },
              md: { px: 6 },
            },
          }}
        >
          <Tr>
            <Th>Name</Th>
            <Th display={{ base: "none", md: "table-cell" }}>Email</Th>
            <Th>Role</Th>
            {selectedProject?.role === "ADMIN" && <Th />}
          </Tr>
        </Thead>
        <Tbody
          sx={{
            td: {
              base: { px: 0 },
              md: { px: 6 },
            },
          }}
        >
          {selectedProject &&
            sortedMembers.map((member) => {
              return (
                <Tr key={member.id}>
                  <Td>
                    <Text fontWeight="bold">{member.user.name}</Text>
                  </Td>
                  <Td display={{ base: "none", md: "table-cell" }} h="full">
                    {member.user.email}
                  </Td>
                  <Td fontSize={{ base: "xs", md: "sm" }}>{member.role}</Td>
                  {selectedProject.role === "ADMIN" && (
                    <Td textAlign="end">
                      {/* Admins can't remove themselves or the project owner. */}
                      {member.user.id !== session?.user?.id &&
                        member.user.id !== selectedProject.personalProjectUserId && (
                          <IconButton
                            aria-label="Remove member"
                            colorScheme="red"
                            icon={<BsTrash />}
                            onClick={() => setMemberToRemove(member.user)}
                          />
                        )}
                    </Td>
                  )}
                </Tr>
              );
            })}
          {selectedProject?.projectUserInvitations?.map((invitation) => {
            return (
              <Tr key={invitation.id}>
                <Td>
                  <Text as="i">Invitation pending</Text>
                </Td>
                <Td>{invitation.email}</Td>
                <Td fontSize="sm">{invitation.role}</Td>
                {selectedProject.role === "ADMIN" && (
                  <Td textAlign="end">
                    <Button
                      size="sm"
                      colorScheme="red"
                      variant="ghost"
                      onClick={() => cancelInvitation(invitation.invitationToken)}
                      isLoading={isCancelling}
                    >
                      Cancel
                    </Button>
                  </Td>
                )}
              </Tr>
            );
          })}
        </Tbody>
      </Table>
      <InviteMemberModal isOpen={inviteMemberModal.isOpen} onClose={inviteMemberModal.onClose} />
      <RemoveMemberDialog
        member={memberToRemove}
        isOpen={!!memberToRemove}
        onClose={() => setMemberToRemove(null)}
      />
    </>
  );
};
export default MemberTable;

View File

@@ -0,0 +1,71 @@
import {
Button,
AlertDialog,
AlertDialogBody,
AlertDialogFooter,
AlertDialogHeader,
AlertDialogContent,
AlertDialogOverlay,
Text,
VStack,
Spinner,
} from "@chakra-ui/react";
import { type User } from "@prisma/client";
import { useRouter } from "next/router";
import { useRef } from "react";
import { api } from "~/utils/api";
import { useHandledAsyncCallback, useSelectedProject } from "~/utils/hooks";
/**
 * Confirmation dialog for removing a member from the selected project.
 * `member` is the user pending removal; null when nothing is pending.
 */
export const RemoveMemberDialog = ({
  isOpen,
  onClose,
  member,
}: {
  isOpen: boolean;
  onClose: () => void;
  member: User | null;
}) => {
  const selectedProject = useSelectedProject();
  const removeUserMutation = api.users.removeUserFromProject.useMutation();
  const utils = api.useContext();
  // Focus target while open; also the least-destructive action for AlertDialog.
  const cancelRef = useRef<HTMLButtonElement>(null);

  const [onRemoveConfirm, isRemoving] = useHandledAsyncCallback(async () => {
    if (!selectedProject.data?.id || !member?.id) return;
    await removeUserMutation.mutateAsync({ projectId: selectedProject.data.id, userId: member.id });
    await utils.projects.get.invalidate();
    onClose();
    // Dependency list now covers everything the callback reads; the previous
    // list omitted `member`, `utils`, and `onClose` and listed an unused router.
  }, [removeUserMutation, selectedProject.data?.id, member?.id, utils, onClose]);

  return (
    <AlertDialog isOpen={isOpen} leastDestructiveRef={cancelRef} onClose={onClose}>
      <AlertDialogOverlay>
        <AlertDialogContent>
          <AlertDialogHeader fontSize="lg" fontWeight="bold">
            Remove Member
          </AlertDialogHeader>
          <AlertDialogBody>
            <VStack spacing={4} alignItems="flex-start">
              <Text>
                Are you sure you want to remove <b>{member?.name}</b> from the project?
              </Text>
            </VStack>
          </AlertDialogBody>
          <AlertDialogFooter>
            <Button ref={cancelRef} onClick={onClose}>
              Cancel
            </Button>
            <Button colorScheme="red" onClick={onRemoveConfirm} ml={3} w={20}>
              {isRemoving ? <Spinner /> : "Remove"}
            </Button>
          </AlertDialogFooter>
        </AlertDialogContent>
      </AlertDialogOverlay>
    </AlertDialog>
  );
};

View File

@@ -0,0 +1,30 @@
import { Button, HStack, type ButtonProps, Icon, Text } from "@chakra-ui/react";
import { type IconType } from "react-icons";
/**
 * Small outlined toolbar button with an icon and a text label.
 * Any extra ButtonProps are spread last so callers can override the defaults.
 */
const ActionButton = ({
  icon,
  label,
  ...buttonProps
}: { icon: IconType; label: string } & ButtonProps) => (
  <Button
    variant="outline"
    size="sm"
    colorScheme="blue"
    color="black"
    bgColor="white"
    borderColor="gray.300"
    borderRadius={4}
    fontSize="sm"
    fontWeight="normal"
    {...buttonProps}
  >
    <HStack spacing={1}>
      {icon && <Icon as={icon} />}
      <Text>{label}</Text>
    </HStack>
  </Button>
);

export default ActionButton;

View File

@@ -0,0 +1,55 @@
import { Box, IconButton, useToast } from "@chakra-ui/react";
import { CopyIcon } from "lucide-react";
import SyntaxHighlighter from "react-syntax-highlighter";
import { atelierCaveLight } from "react-syntax-highlighter/dist/cjs/styles/hljs";
import stringify from "json-stringify-pretty-compact";
/**
 * Syntax-highlighted, pretty-printed JSON viewer with a copy-to-clipboard
 * button overlaid in the top-right corner.
 */
const FormattedJson = ({ json }: { json: unknown }) => {
  const jsonString = stringify(json, { maxLength: 40 });
  const toast = useToast();

  // Clipboard access can fail (insecure context, denied permission);
  // report the outcome either way via a toast.
  const copyToClipboard = async (text: string) => {
    try {
      await navigator.clipboard.writeText(text);
      toast({
        title: "Copied to clipboard",
        status: "success",
        duration: 2000,
      });
    } catch {
      toast({
        title: "Failed to copy to clipboard",
        status: "error",
        duration: 2000,
      });
    }
  };

  return (
    <Box position="relative" fontSize="sm" borderRadius="md" overflow="hidden">
      <SyntaxHighlighter
        customStyle={{ overflowX: "unset" }}
        language="json"
        style={atelierCaveLight}
        lineProps={{
          style: { wordBreak: "break-all", whiteSpace: "pre-wrap" },
        }}
        wrapLines
      >
        {jsonString}
      </SyntaxHighlighter>
      <IconButton
        aria-label="Copy"
        icon={<CopyIcon />}
        position="absolute"
        top={1}
        right={1}
        size="xs"
        variant="ghost"
        onClick={() => void copyToClipboard(jsonString)}
      />
    </Box>
  );
};
export { FormattedJson };

View File

@@ -0,0 +1,30 @@
import { Button, HStack, Icon, Text } from "@chakra-ui/react";
import { BsPlus } from "react-icons/bs";
import { comparators, defaultFilterableFields } from "~/state/logFiltersSlice";
import { useAppStore } from "~/state/store";
const AddFilterButton = () => {
const addFilter = useAppStore((s) => s.logFilters.addFilter);
return (
<HStack
as={Button}
variant="ghost"
onClick={() =>
addFilter({
id: Date.now().toString(),
field: defaultFilterableFields[0],
comparator: comparators[0],
value: "",
})
}
spacing={0}
fontSize="sm"
>
<Icon as={BsPlus} boxSize={5} />
<Text>Add Filter</Text>
</HStack>
);
};
export default AddFilterButton;

View File

@@ -0,0 +1,44 @@
import { useCallback, useState } from "react";
import { HStack, IconButton, Input } from "@chakra-ui/react";
import { BsTrash } from "react-icons/bs";
import { type LogFilter } from "~/state/logFiltersSlice";
import { useAppStore } from "~/state/store";
import { debounce } from "lodash-es";
import SelectFieldDropdown from "./SelectFieldDropdown";
import SelectComparatorDropdown from "./SelectComparatorDropdown";
// One editable filter row: field dropdown, comparator dropdown, free-text
// value input, and a delete button. Value edits are debounced before being
// written to the global store.
const LogFilter = ({ filter }: { filter: LogFilter }) => {
  const updateFilter = useAppStore((s) => s.logFilters.updateFilter);
  const deleteFilter = useAppStore((s) => s.logFilters.deleteFilter);

  // Local copy of the value so typing stays responsive while store updates
  // are debounced.
  // NOTE(review): this never re-syncs if filter.value changes externally —
  // confirm that's acceptable.
  const [editedValue, setEditedValue] = useState(filter.value);
  // NOTE(review): debounce() inside useCallback recreates the debounced
  // function whenever updateFilter changes, dropping any pending trailing
  // call; useMemo plus cancel-on-unmount would be safer — confirm before
  // changing. `leading: true` makes the first keystroke update immediately.
  const debouncedUpdateFilter = useCallback(
    debounce((filter: LogFilter) => updateFilter(filter), 500, {
      leading: true,
    }),
    [updateFilter],
  );

  return (
    <HStack>
      <SelectFieldDropdown filter={filter} />
      <SelectComparatorDropdown filter={filter} />
      <Input
        value={editedValue}
        onChange={(e) => {
          setEditedValue(e.target.value);
          debouncedUpdateFilter({ ...filter, value: e.target.value });
        }}
      />
      <IconButton
        aria-label="Delete Filter"
        icon={<BsTrash />}
        onClick={() => deleteFilter(filter.id)}
      />
    </HStack>
  );
};
export default LogFilter;

View File

@@ -0,0 +1,30 @@
import { VStack, Text } from "@chakra-ui/react";
import AddFilterButton from "./AddFilterButton";
import { useAppStore } from "~/state/store";
import LogFilter from "./LogFilter";
/** White card listing every active request-log filter plus an add button. */
const LogFilters = () => {
  const filters = useAppStore((s) => s.logFilters.filters);

  return (
    <VStack
      w="full"
      p={4}
      spacing={4}
      alignItems="flex-start"
      bgColor="white"
      borderWidth={1}
      borderRadius={8}
    >
      <Text fontWeight="bold" color="gray.500">
        Filters
      </Text>
      {filters.map((logFilter) => (
        <LogFilter key={logFilter.id} filter={logFilter} />
      ))}
      <AddFilterButton />
    </VStack>
  );
};

export default LogFilters;

View File

@@ -0,0 +1,19 @@
import { comparators, type LogFilter } from "~/state/logFiltersSlice";
import { useAppStore } from "~/state/store";
import InputDropdown from "~/components/InputDropdown";
const SelectComparatorDropdown = ({ filter }: { filter: LogFilter }) => {
const updateFilter = useAppStore((s) => s.logFilters.updateFilter);
const { comparator } = filter;
return (
<InputDropdown
options={comparators}
selectedOption={comparator}
onSelect={(option) => updateFilter({ ...filter, comparator: option })}
/>
);
};
export default SelectComparatorDropdown;

View File

@@ -0,0 +1,22 @@
import { defaultFilterableFields, type LogFilter } from "~/state/logFiltersSlice";
import { useAppStore } from "~/state/store";
import { useTagNames } from "~/utils/hooks";
import InputDropdown from "~/components/InputDropdown";
/** Dropdown for choosing which field a log filter matches against. */
const SelectFieldDropdown = ({ filter }: { filter: LogFilter }) => {
  const tagNames = useTagNames().data;
  const updateFilter = useAppStore((s) => s.logFilters.updateFilter);

  // Built-in fields first, then any dynamic tag names pulled from the logs.
  const fieldOptions = [...defaultFilterableFields, ...(tagNames ?? [])];

  return (
    <InputDropdown
      options={fieldOptions}
      selectedOption={filter.field}
      onSelect={(field) => updateFilter({ ...filter, field })}
    />
  );
};
export default SelectFieldDropdown;

View File

@@ -0,0 +1,16 @@
import { type StackProps } from "@chakra-ui/react";
import { useLoggedCalls } from "~/utils/hooks";
import Paginator from "../Paginator";
/** Pagination controls for the logged-calls table; renders nothing until counts load. */
const LoggedCallsPaginator = (props: StackProps) => {
  const loggedCalls = useLoggedCalls().data;

  if (!loggedCalls) return null;

  return <Paginator count={loggedCalls.count} {...props} />;
};

export default LoggedCallsPaginator;

View File

@@ -0,0 +1,36 @@
import { Card, Table, Tbody } from "@chakra-ui/react";
import { useState } from "react";
import { useLoggedCalls } from "~/utils/hooks";
import { TableHeader, TableRow } from "./TableRow";
/**
 * Table of logged calls. At most one row is expanded at a time; clicking an
 * expanded row collapses it, clicking another row moves the expansion there.
 */
export default function LoggedCallsTable() {
  const [expandedRow, setExpandedRow] = useState<string | null>(null);
  const loggedCalls = useLoggedCalls().data;

  const toggleRow = (id: string) => {
    setExpandedRow(expandedRow === id ? null : id);
  };

  return (
    <Card width="100%" overflowX="auto">
      <Table>
        <TableHeader showCheckbox />
        <Tbody>
          {loggedCalls?.calls?.map((loggedCall) => (
            <TableRow
              key={loggedCall.id}
              loggedCall={loggedCall}
              isExpanded={loggedCall.id === expandedRow}
              onToggle={() => toggleRow(loggedCall.id)}
              showCheckbox
            />
          ))}
        </Tbody>
      </Table>
    </Card>
  );
}

View File

@@ -0,0 +1,170 @@
import {
Box,
Heading,
Td,
Tr,
Thead,
Th,
Tooltip,
Collapse,
HStack,
VStack,
Button,
ButtonGroup,
Text,
Checkbox,
} from "@chakra-ui/react";
import dayjs from "dayjs";
import relativeTime from "dayjs/plugin/relativeTime";
import Link from "next/link";
import { type RouterOutputs } from "~/utils/api";
import { FormattedJson } from "./FormattedJson";
import { useAppStore } from "~/state/store";
import { useLoggedCalls, useTagNames } from "~/utils/hooks";
import { useMemo } from "react";
dayjs.extend(relativeTime);
type LoggedCall = RouterOutputs["loggedCalls"]["list"]["calls"][0];
/**
 * Header row for the logged-calls table. The optional leading checkbox
 * selects/deselects every log matching the current filters and shows a
 * "(selected/total)" count beside it.
 */
export const TableHeader = ({ showCheckbox }: { showCheckbox?: boolean }) => {
  const matchingLogIds = useLoggedCalls().data?.matchingLogIds;
  const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
  const addAll = useAppStore((s) => s.selectedLogs.addSelectedLogIds);
  const clearAll = useAppStore((s) => s.selectedLogs.clearSelectedLogIds);

  // True only when there is at least one matching log and all are selected.
  const allSelected = useMemo(() => {
    if (!matchingLogIds || !matchingLogIds.length) return false;
    return matchingLogIds.every((id) => selectedLogIds.has(id));
  }, [selectedLogIds, matchingLogIds]);
  const tagNames = useTagNames().data;
  return (
    <Thead>
      <Tr>
        {showCheckbox && (
          <Th pr={0}>
            <HStack minW={16}>
              <Checkbox
                isChecked={allSelected}
                onChange={() => {
                  // if/else instead of an expression-statement ternary:
                  // clearer and keeps ESLint's no-unused-expressions happy.
                  if (allSelected) {
                    clearAll();
                  } else {
                    addAll(matchingLogIds || []);
                  }
                }}
              />
              <Text>
                ({selectedLogIds.size ? `${selectedLogIds.size}/` : ""}
                {matchingLogIds?.length || 0})
              </Text>
            </HStack>
          </Th>
        )}
        <Th>Sent At</Th>
        <Th>Model</Th>
        {tagNames?.map((tagName) => <Th key={tagName}>{tagName}</Th>)}
        <Th isNumeric>Duration</Th>
        <Th isNumeric>Input tokens</Th>
        <Th isNumeric>Output tokens</Th>
        <Th isNumeric>Status</Th>
      </Tr>
    </Thead>
  );
};
// Renders one logged call as two <Tr>s: a clickable summary row and a
// collapsible detail row showing the request/response payloads as JSON.
export const TableRow = ({
  loggedCall,
  isExpanded,
  onToggle,
  showCheckbox,
}: {
  loggedCall: LoggedCall;
  isExpanded: boolean;
  onToggle: () => void;
  showCheckbox?: boolean;
}) => {
  // Any non-200 status (including a missing response) renders in red.
  const isError = loggedCall.modelResponse?.statusCode !== 200;
  // Short timestamp for the cell; the full timestamp goes in a tooltip.
  const requestedAt = dayjs(loggedCall.requestedAt).format("MMMM D h:mm A");
  const fullTime = dayjs(loggedCall.requestedAt).toString();

  // Row-selection state lives in the global store so the header checkbox
  // can aggregate across rows.
  const isChecked = useAppStore((s) => s.selectedLogs.selectedLogIds.has(loggedCall.id));
  const toggleChecked = useAppStore((s) => s.selectedLogs.toggleSelectedLogId);

  const tagNames = useTagNames().data;

  return (
    <>
      <Tr
        onClick={onToggle}
        key={loggedCall.id}
        _hover={{ bgColor: "gray.50", cursor: "pointer" }}
        sx={{
          "> td": { borderBottom: "none" },
        }}
        fontSize="sm"
      >
        {showCheckbox && (
          <Td>
            <Checkbox isChecked={isChecked} onChange={() => toggleChecked(loggedCall.id)} />
          </Td>
        )}
        <Td>
          <Tooltip label={fullTime} placement="top">
            <Box whiteSpace="nowrap" minW="120px">
              {requestedAt}
            </Box>
          </Tooltip>
        </Td>
        <Td>
          <HStack justifyContent="flex-start">
            <Text
              colorScheme="purple"
              color="purple.500"
              borderColor="purple.500"
              px={1}
              borderRadius={4}
              borderWidth={1}
              fontSize="xs"
              whiteSpace="nowrap"
            >
              {loggedCall.model}
            </Text>
          </HStack>
        </Td>
        {tagNames?.map((tagName) => <Td key={tagName}>{loggedCall.tags[tagName]}</Td>)}
        <Td isNumeric>
          {loggedCall.cacheHit ? (
            <Text color="gray.500">Cached</Text>
          ) : (
            ((loggedCall.modelResponse?.durationMs ?? 0) / 1000).toFixed(2) + "s"
          )}
        </Td>
        <Td isNumeric>{loggedCall.modelResponse?.inputTokens}</Td>
        <Td isNumeric>{loggedCall.modelResponse?.outputTokens}</Td>
        <Td sx={{ color: isError ? "red.500" : "green.500", fontWeight: "semibold" }} isNumeric>
          {loggedCall.modelResponse?.statusCode ?? "No response"}
        </Td>
      </Tr>
      <Tr>
        <Td colSpan={8} p={0}>
          <Collapse in={isExpanded} unmountOnExit={true}>
            <VStack p={4} align="stretch">
              <HStack align="stretch">
                <VStack flex={1} align="stretch">
                  <Heading size="sm">Input</Heading>
                  <FormattedJson json={loggedCall.modelResponse?.reqPayload} />
                </VStack>
                <VStack flex={1} align="stretch">
                  <Heading size="sm">Output</Heading>
                  <FormattedJson json={loggedCall.modelResponse?.respPayload} />
                </VStack>
              </HStack>
              <ButtonGroup alignSelf="flex-end">
                <Button as={Link} colorScheme="blue" href={{ pathname: "/experiments" }}>
                  Experiments
                </Button>
              </ButtonGroup>
            </VStack>
          </Collapse>
        </Td>
      </Tr>
    </>
  );
};

View File

@@ -2,14 +2,14 @@ import { HStack, Icon, Text, Tooltip, type TooltipProps, VStack, Divider } from
import { BsCurrencyDollar } from "react-icons/bs";
type CostTooltipProps = {
promptTokens: number | null;
completionTokens: number | null;
inputTokens: number | null;
outputTokens: number | null;
cost: number;
} & TooltipProps;
export const CostTooltip = ({
promptTokens,
completionTokens,
inputTokens,
outputTokens,
cost,
children,
...props
@@ -36,12 +36,12 @@ export const CostTooltip = ({
<HStack>
<VStack w="28" spacing={1}>
<Text>Prompt</Text>
<Text>{promptTokens ?? 0}</Text>
<Text>{inputTokens ?? 0}</Text>
</VStack>
<Divider borderColor="gray.200" h={8} orientation="vertical" />
<VStack w="28" spacing={1}>
<Text whiteSpace="nowrap">Completion</Text>
<Text>{completionTokens ?? 0}</Text>
<Text>{outputTokens ?? 0}</Text>
</VStack>
</HStack>
</VStack>

View File

@@ -21,6 +21,11 @@ export const env = createEnv({
ANTHROPIC_API_KEY: z.string().default("placeholder"),
SENTRY_AUTH_TOKEN: z.string().optional(),
OPENPIPE_API_KEY: z.string().optional(),
SENDER_EMAIL: z.string().default("placeholder"),
SMTP_HOST: z.string().default("placeholder"),
SMTP_PORT: z.string().default("placeholder"),
SMTP_LOGIN: z.string().default("placeholder"),
SMTP_PASSWORD: z.string().default("placeholder"),
},
/**
@@ -58,6 +63,11 @@ export const env = createEnv({
SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
OPENPIPE_API_KEY: process.env.OPENPIPE_API_KEY,
NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS: process.env.NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS,
SENDER_EMAIL: process.env.SENDER_EMAIL,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_LOGIN: process.env.SMTP_LOGIN,
SMTP_PASSWORD: process.env.SMTP_PASSWORD,
},
/**
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation.

View File

@@ -28,6 +28,10 @@ const modelProvider: AnthropicProvider = {
inputSchema: inputSchema as JSONSchema4,
canStream: true,
getCompletion,
getUsage: (input, output) => {
// TODO: add usage logic
return null;
},
...frontendModelProvider,
};

View File

@@ -1,6 +1,7 @@
import openaiChatCompletionFrontend from "./openai-ChatCompletion/frontend";
import replicateLlama2Frontend from "./replicate-llama2/frontend";
import anthropicFrontend from "./anthropic-completion/frontend";
import openpipeFrontend from "./openpipe-chat/frontend";
import { type SupportedProvider, type FrontendModelProvider } from "./types";
// Keep attributes here that need to be accessible from the frontend. We can't
@@ -10,6 +11,7 @@ const frontendModelProviders: Record<SupportedProvider, FrontendModelProvider<an
"openai/ChatCompletion": openaiChatCompletionFrontend,
"replicate/llama2": replicateLlama2Frontend,
"anthropic/completion": anthropicFrontend,
"openpipe/Chat": openpipeFrontend,
};
export default frontendModelProviders;

View File

@@ -1,12 +1,14 @@
import openaiChatCompletion from "./openai-ChatCompletion";
import replicateLlama2 from "./replicate-llama2";
import anthropicCompletion from "./anthropic-completion";
import openpipeChatCompletion from "./openpipe-chat";
import { type SupportedProvider, type ModelProvider } from "./types";
const modelProviders: Record<SupportedProvider, ModelProvider<any, any, any>> = {
"openai/ChatCompletion": openaiChatCompletion,
"replicate/llama2": replicateLlama2,
"anthropic/completion": anthropicCompletion,
"openpipe/Chat": openpipeChatCompletion,
};
export default modelProviders;

View File

@@ -1,58 +1,10 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
import {
type ChatCompletionChunk,
type ChatCompletion,
type CompletionCreateParams,
} from "openai/resources/chat";
import { countOpenAIChatTokens } from "~/utils/countTokens";
import { type CompletionResponse } from "../types";
import { isArray, isString, omit } from "lodash-es";
import { openai } from "~/server/utils/openai";
import { truthyFilter } from "~/utils/utils";
import { isArray, isString } from "lodash-es";
import { APIError } from "openai";
import frontendModelProvider from "./frontend";
import modelProvider, { type SupportedModel } from ".";
/**
 * Folds one streamed ChatCompletionChunk into the ChatCompletion accumulated
 * so far and returns the merged result. Pass `base === null` for the first
 * chunk; the chunk's own metadata then seeds the accumulator.
 */
const mergeStreamedChunks = (
  base: ChatCompletion | null,
  chunk: ChatCompletionChunk,
): ChatCompletion => {
  if (base === null) {
    // Seed the accumulator from the first chunk's metadata (id, model, …),
    // then merge its choices through the normal path below.
    return mergeStreamedChunks({ ...chunk, choices: [] }, chunk);
  }
  const choices = [...base.choices];
  for (const choice of chunk.choices) {
    const baseChoice = choices.find((c) => c.index === choice.index);
    if (baseChoice) {
      // A later chunk's finish_reason (once non-null) wins.
      baseChoice.finish_reason = choice.finish_reason ?? baseChoice.finish_reason;
      baseChoice.message = baseChoice.message ?? { role: "assistant" };

      // Streamed content arrives as incremental deltas; append them.
      if (choice.delta?.content)
        baseChoice.message.content =
          ((baseChoice.message.content as string) ?? "") + (choice.delta.content ?? "");
      // Function-call name/arguments are also streamed piecewise.
      if (choice.delta?.function_call) {
        const fnCall = baseChoice.message.function_call ?? {};
        fnCall.name =
          ((fnCall.name as string) ?? "") + ((choice.delta.function_call.name as string) ?? "");
        fnCall.arguments =
          ((fnCall.arguments as string) ?? "") +
          ((choice.delta.function_call.arguments as string) ?? "");
      }
    } else {
      // First chunk seen for this choice index: promote its delta to a message.
      // @ts-expect-error the types are correctly telling us that finish_reason
      // could be null, but don't want to fix it right now.
      choices.push({ ...omit(choice, "delta"), message: { role: "assistant", ...choice.delta } });
    }
  }

  const merged: ChatCompletion = {
    ...base,
    choices,
  };

  return merged;
};
import { type ChatCompletion, type CompletionCreateParams } from "openai/resources/chat";
import mergeChunks from "openpipe/src/openai/mergeChunks";
import { openai } from "~/server/utils/openai";
import { type CompletionResponse } from "../types";
export async function getCompletion(
input: CompletionCreateParams,
@@ -60,13 +12,9 @@ export async function getCompletion(
): Promise<CompletionResponse<ChatCompletion>> {
const start = Date.now();
let finalCompletion: ChatCompletion | null = null;
let promptTokens: number | undefined = undefined;
let completionTokens: number | undefined = undefined;
const modelName = modelProvider.getModel(input) as SupportedModel;
try {
if (onStream) {
console.log("got started");
const resp = await openai.chat.completions.create(
{ ...input, stream: true },
{
@@ -74,11 +22,9 @@ export async function getCompletion(
},
);
for await (const part of resp) {
console.log("got part", part);
finalCompletion = mergeStreamedChunks(finalCompletion, part);
finalCompletion = mergeChunks(finalCompletion, part);
onStream(finalCompletion);
}
console.log("got final", finalCompletion);
if (!finalCompletion) {
return {
type: "error",
@@ -86,16 +32,6 @@ export async function getCompletion(
autoRetry: false,
};
}
try {
promptTokens = countOpenAIChatTokens(modelName, input.messages);
completionTokens = countOpenAIChatTokens(
modelName,
finalCompletion.choices.map((c) => c.message).filter(truthyFilter),
);
} catch (err) {
// TODO handle this, library seems like maybe it doesn't work with function calls?
console.error(err);
}
} else {
const resp = await openai.chat.completions.create(
{ ...input, stream: false },
@@ -104,25 +40,14 @@ export async function getCompletion(
},
);
finalCompletion = resp;
promptTokens = resp.usage?.prompt_tokens ?? 0;
completionTokens = resp.usage?.completion_tokens ?? 0;
}
const timeToComplete = Date.now() - start;
const { promptTokenPrice, completionTokenPrice } = frontendModelProvider.models[modelName];
let cost = undefined;
if (promptTokenPrice && completionTokenPrice && promptTokens && completionTokens) {
cost = promptTokens * promptTokenPrice + completionTokens * completionTokenPrice;
}
return {
type: "success",
statusCode: 200,
value: finalCompletion,
timeToComplete,
promptTokens,
completionTokens,
cost,
};
} catch (error: unknown) {
if (error instanceof APIError) {

View File

@@ -4,6 +4,8 @@ import inputSchema from "./codegen/input.schema.json";
import { type ChatCompletion, type CompletionCreateParams } from "openai/resources/chat";
import { getCompletion } from "./getCompletion";
import frontendModelProvider from "./frontend";
import { countOpenAIChatTokens } from "~/utils/countTokens";
import { truthyFilter } from "~/utils/utils";
const supportedModels = [
"gpt-4-0613",
@@ -39,6 +41,41 @@ const modelProvider: OpenaiChatModelProvider = {
inputSchema: inputSchema as JSONSchema4,
canStream: true,
getCompletion,
  // Reports token counts and dollar cost for a completed OpenAI chat call.
  getUsage: (input, output) => {
    if (output.choices.length === 0) return null;
    const model = modelProvider.getModel(input);
    if (!model) return null;
    let inputTokens: number;
    let outputTokens: number;
    if (output.usage) {
      // Non-streamed responses report usage directly.
      inputTokens = output.usage.prompt_tokens;
      outputTokens = output.usage.completion_tokens;
    } else {
      // Streamed responses omit `usage`, so count tokens locally.
      try {
        inputTokens = countOpenAIChatTokens(model, input.messages);
        outputTokens = countOpenAIChatTokens(
          model,
          output.choices.map((c) => c.message).filter(truthyFilter),
        );
      } catch (err) {
        inputTokens = 0;
        outputTokens = 0;
        // TODO handle this, library seems like maybe it doesn't work with function calls?
        console.error(err);
      }
    }

    const { promptTokenPrice, completionTokenPrice } = frontendModelProvider.models[model];
    let cost = undefined;
    // NOTE(review): the truthiness check means a 0-token count yields
    // cost === undefined rather than 0 — confirm that's intended.
    if (promptTokenPrice && completionTokenPrice && inputTokens && outputTokens) {
      cost = inputTokens * promptTokenPrice + outputTokens * completionTokenPrice;
    }

    return { inputTokens: inputTokens, outputTokens: outputTokens, cost };
  },
...frontendModelProvider,
};

View File

@@ -12,7 +12,6 @@ export const refinementActions: Record<string, RefinementAction> = {
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
@@ -29,7 +28,6 @@ export const refinementActions: Record<string, RefinementAction> = {
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
@@ -120,13 +118,12 @@ export const refinementActions: Record<string, RefinementAction> = {
"Convert to function call": {
icon: TfiThought,
description: "Use function calls to get output from the model in a more structured way.",
instructions: `OpenAI functions are a specialized way for an LLM to return output.
instructions: `OpenAI functions are a specialized way for an LLM to return its final output.
This is what a prompt looks like before adding a function:
Example 1 before:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
@@ -139,11 +136,10 @@ export const refinementActions: Record<string, RefinementAction> = {
],
});
This is what one looks like after adding a function:
Example 1 after:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
@@ -156,7 +152,7 @@ export const refinementActions: Record<string, RefinementAction> = {
],
functions: [
{
name: "extract_sentiment",
name: "log_extracted_sentiment",
parameters: {
type: "object", // parameters must always be an object with a properties key
properties: { // properties key is required
@@ -169,13 +165,13 @@ export const refinementActions: Record<string, RefinementAction> = {
},
],
function_call: {
name: "extract_sentiment",
name: "log_extracted_sentiment",
},
});
Here's another example of adding a function:
Before:
=========
Example 2 before:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
@@ -197,7 +193,7 @@ export const refinementActions: Record<string, RefinementAction> = {
temperature: 0,
});
After:
Example 2 after:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
@@ -215,7 +211,7 @@ export const refinementActions: Record<string, RefinementAction> = {
temperature: 0,
functions: [
{
name: "score_post",
name: "log_post_score",
parameters: {
type: "object",
properties: {
@@ -227,17 +223,16 @@ export const refinementActions: Record<string, RefinementAction> = {
},
],
function_call: {
name: "score_post",
name: "log_post_score",
},
});
Another example
=========
Before:
Example 3 before:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
stream: true,
messages: [
{
role: "system",
@@ -246,7 +241,7 @@ export const refinementActions: Record<string, RefinementAction> = {
],
});
After:
Example 3 after:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
@@ -258,21 +253,24 @@ export const refinementActions: Record<string, RefinementAction> = {
],
functions: [
{
name: "write_in_language",
name: "log_translated_text",
parameters: {
type: "object",
properties: {
text: {
translated_text: {
type: "string",
description: "The text, written in the language specified in the prompt",
},
},
},
},
],
function_call: {
name: "write_in_language",
name: "log_translated_text",
},
});
=========
Add an OpenAI function that takes one or more nested parameters that match the expected output from this prompt.`,
},

View File

@@ -0,0 +1,88 @@
import { type OpenpipeChatOutput, type SupportedModel } from ".";
import { type FrontendModelProvider } from "../types";
import { refinementActions } from "./refinementActions";
import {
templateOpenOrcaPrompt,
templateAlpacaInstructPrompt,
// templateSystemUserAssistantPrompt,
templateInstructionInputResponsePrompt,
templateAiroborosPrompt,
templateVicunaPrompt,
} from "./templatePrompt";
/**
 * Frontend metadata for the OpenPipe-hosted chat models: display names,
 * pricing, context windows, and the prompt template each model expects.
 */
const frontendModelProvider: FrontendModelProvider<SupportedModel, OpenpipeChatOutput> = {
  // Fixed: was "OpenAI ChatCompletion", a copy-paste from the OpenAI
  // provider — this is the OpenPipe chat provider.
  name: "OpenPipe Chat",

  models: {
    "Open-Orca/OpenOrcaxOpenChat-Preview2-13B": {
      name: "OpenOrcaxOpenChat-Preview2-13B",
      contextWindow: 4096,
      pricePerSecond: 0.0003,
      speed: "medium",
      provider: "openpipe/Chat",
      learnMoreUrl: "https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B",
      templatePrompt: templateOpenOrcaPrompt,
    },
    "Open-Orca/OpenOrca-Platypus2-13B": {
      name: "OpenOrca-Platypus2-13B",
      contextWindow: 4096,
      pricePerSecond: 0.0003,
      speed: "medium",
      provider: "openpipe/Chat",
      learnMoreUrl: "https://huggingface.co/Open-Orca/OpenOrca-Platypus2-13B",
      templatePrompt: templateAlpacaInstructPrompt,
      defaultStopTokens: ["</s>"],
    },
    // "stabilityai/StableBeluga-13B": {
    //   name: "StableBeluga-13B",
    //   contextWindow: 4096,
    //   pricePerSecond: 0.0003,
    //   speed: "medium",
    //   provider: "openpipe/Chat",
    //   learnMoreUrl: "https://huggingface.co/stabilityai/StableBeluga-13B",
    //   templatePrompt: templateSystemUserAssistantPrompt,
    // },
    "NousResearch/Nous-Hermes-Llama2-13b": {
      name: "Nous-Hermes-Llama2-13b",
      contextWindow: 4096,
      pricePerSecond: 0.0003,
      speed: "medium",
      provider: "openpipe/Chat",
      learnMoreUrl: "https://huggingface.co/NousResearch/Nous-Hermes-Llama2-13b",
      templatePrompt: templateInstructionInputResponsePrompt,
    },
    "jondurbin/airoboros-l2-13b-gpt4-2.0": {
      name: "airoboros-l2-13b-gpt4-2.0",
      contextWindow: 4096,
      pricePerSecond: 0.0003,
      speed: "medium",
      provider: "openpipe/Chat",
      learnMoreUrl: "https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-2.0",
      templatePrompt: templateAiroborosPrompt,
    },
    "lmsys/vicuna-13b-v1.5": {
      name: "vicuna-13b-v1.5",
      contextWindow: 4096,
      pricePerSecond: 0.0003,
      speed: "medium",
      provider: "openpipe/Chat",
      learnMoreUrl: "https://huggingface.co/lmsys/vicuna-13b-v1.5",
      templatePrompt: templateVicunaPrompt,
    },
    "NousResearch/Nous-Hermes-llama-2-7b": {
      name: "Nous-Hermes-llama-2-7b",
      contextWindow: 4096,
      pricePerSecond: 0.0003,
      speed: "medium",
      provider: "openpipe/Chat",
      learnMoreUrl: "https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b",
      templatePrompt: templateInstructionInputResponsePrompt,
    },
  },

  refinementActions,

  // Model output is plain text; wrap it in the normalized output envelope.
  normalizeOutput: (output) => ({ type: "text", value: output }),
};

export default frontendModelProvider;

View File

@@ -0,0 +1,120 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
import { isArray, isString } from "lodash-es";
import OpenAI, { APIError } from "openai";
import { type CompletionResponse } from "../types";
import { type OpenpipeChatInput, type OpenpipeChatOutput } from ".";
import frontendModelProvider from "./frontend";
// OpenAI-compatible inference endpoint for each hosted model.
// NOTE(review): these look like ephemeral RunPod proxy URLs hard-coded into
// source — consider moving them to env/config so a pod restart doesn't
// require a code change.
const modelEndpoints: Record<OpenpipeChatInput["model"], string> = {
  "Open-Orca/OpenOrcaxOpenChat-Preview2-13B": "https://5ef82gjxk8kdys-8000.proxy.runpod.net/v1",
  "Open-Orca/OpenOrca-Platypus2-13B": "https://lt5qlel6qcji8t-8000.proxy.runpod.net/v1",
  // "stabilityai/StableBeluga-13B": "https://vcorl8mxni2ou1-8000.proxy.runpod.net/v1",
  "NousResearch/Nous-Hermes-Llama2-13b": "https://ncv8pw3u0vb8j2-8000.proxy.runpod.net/v1",
  "jondurbin/airoboros-l2-13b-gpt4-2.0": "https://9nrbx7oph4btou-8000.proxy.runpod.net/v1",
  "lmsys/vicuna-13b-v1.5": "https://h88hkt3ux73rb7-8000.proxy.runpod.net/v1",
  "NousResearch/Nous-Hermes-llama-2-7b": "https://ua1bpc6kv3dgge-8000.proxy.runpod.net/v1",
};
/**
 * Runs a (possibly streaming) completion against an OpenPipe-hosted model.
 *
 * The chat `messages` are flattened into the model-specific prompt template,
 * then sent to that model's OpenAI-compatible endpoint.
 *
 * @param input model id, chat messages, and sampling parameters
 * @param onStream invoked with the accumulated partial output after each
 *   streamed chunk; pass null for a non-streaming request
 * @returns a success envelope with the completion text, or a typed error
 */
export async function getCompletion(
  input: OpenpipeChatInput,
  onStream: ((partialOutput: OpenpipeChatOutput) => void) | null,
): Promise<CompletionResponse<OpenpipeChatOutput>> {
  const { model, messages, ...rest } = input;
  const templatedPrompt = frontendModelProvider.models[model].templatePrompt?.(messages);

  if (!templatedPrompt) {
    return {
      type: "error",
      message: "Failed to generate prompt",
      autoRetry: false,
    };
  }

  // Each model is served behind its own OpenAI-compatible endpoint.
  const openai = new OpenAI({
    baseURL: modelEndpoints[model],
  });

  const start = Date.now();
  let finalCompletion: OpenpipeChatOutput = "";

  const completionParams = {
    model,
    prompt: templatedPrompt,
    ...rest,
  };
  // Fall back to the model's default stop tokens when the caller set none.
  if (!completionParams.stop && frontendModelProvider.models[model].defaultStopTokens) {
    completionParams.stop = frontendModelProvider.models[model].defaultStopTokens;
  }

  try {
    if (onStream) {
      const resp = await openai.completions.create(
        { ...completionParams, stream: true },
        {
          maxRetries: 0,
        },
      );
      for await (const part of resp) {
        // `?? ""` guards against chunks with an empty `choices` array;
        // without it the literal string "undefined" would be appended to
        // the output (fixes a bug in the previous `+= part.choices[0]?.text`).
        finalCompletion += part.choices[0]?.text ?? "";
        onStream(finalCompletion);
      }
      if (!finalCompletion) {
        return {
          type: "error",
          message: "Streaming failed to return a completion",
          autoRetry: false,
        };
      }
    } else {
      const resp = await openai.completions.create(
        { ...completionParams, stream: false },
        {
          maxRetries: 0,
        },
      );
      finalCompletion = resp.choices[0]?.text || "";
      if (!finalCompletion) {
        return {
          type: "error",
          message: "Failed to return a completion",
          autoRetry: false,
        };
      }
    }
    const timeToComplete = Date.now() - start;

    return {
      type: "success",
      statusCode: 200,
      value: finalCompletion,
      timeToComplete,
    };
  } catch (error: unknown) {
    if (error instanceof APIError) {
      // The types from the sdk are wrong
      const rawMessage = error.message as string | string[];
      // If the message is not a string, stringify it
      const message = isString(rawMessage)
        ? rawMessage
        : isArray(rawMessage)
        ? rawMessage.map((m) => m.toString()).join("\n")
        : (rawMessage as any).toString();
      return {
        type: "error",
        message,
        // Retry only on rate-limit (429) and service-unavailable (503).
        autoRetry: error.status === 429 || error.status === 503,
        statusCode: error.status,
      };
    } else {
      console.error(error);
      return {
        type: "error",
        message: (error as Error).message,
        autoRetry: true,
      };
    }
  }
}

View File

@@ -0,0 +1,53 @@
import { type JSONSchema4 } from "json-schema";
import { type ModelProvider } from "../types";
import inputSchema from "./input.schema.json";
import { getCompletion } from "./getCompletion";
import frontendModelProvider from "./frontend";
// Models served through the OpenPipe chat endpoint, in display order.
const supportedModels = [
  "Open-Orca/OpenOrcaxOpenChat-Preview2-13B",
  "Open-Orca/OpenOrca-Platypus2-13B",
  // "stabilityai/StableBeluga-13B",
  "NousResearch/Nous-Hermes-Llama2-13b",
  "jondurbin/airoboros-l2-13b-gpt4-2.0",
  "lmsys/vicuna-13b-v1.5",
  "NousResearch/Nous-Hermes-llama-2-7b",
] as const;

export type SupportedModel = (typeof supportedModels)[number];

// Mirrors the OpenAI chat-completion request shape, restricted to the
// parameters these hosted models accept.
export type OpenpipeChatInput = {
  model: SupportedModel;
  messages: {
    role: "system" | "user" | "assistant";
    content: string;
  }[];
  temperature?: number;
  top_p?: number;
  stop?: string[] | string;
  max_tokens?: number;
  presence_penalty?: number;
  frequency_penalty?: number;
};

// Raw completion text returned by the model.
export type OpenpipeChatOutput = string;

export type OpenpipeChatModelProvider = ModelProvider<
  SupportedModel,
  OpenpipeChatInput,
  OpenpipeChatOutput
>;

const modelProvider: OpenpipeChatModelProvider = {
  getModel: (input) => input.model,
  inputSchema: inputSchema as JSONSchema4,
  canStream: true,
  getCompletion,
  // Usage/cost reporting is not implemented for these models yet.
  getUsage: (input, output) => {
    // TODO: Implement this
    return null;
  },
  ...frontendModelProvider,
};

export default modelProvider;

View File

@@ -0,0 +1,95 @@
{
"type": "object",
"properties": {
"model": {
"description": "ID of the model to use.",
"example": "Open-Orca/OpenOrcaxOpenChat-Preview2-13B",
"type": "string",
"enum": [
"Open-Orca/OpenOrcaxOpenChat-Preview2-13B",
"Open-Orca/OpenOrca-Platypus2-13B",
"NousResearch/Nous-Hermes-Llama2-13b",
"jondurbin/airoboros-l2-13b-gpt4-2.0",
"lmsys/vicuna-13b-v1.5",
"NousResearch/Nous-Hermes-llama-2-7b"
]
},
"messages": {
"description": "A list of messages comprising the conversation so far.",
"type": "array",
"minItems": 1,
"items": {
"type": "object",
"properties": {
"role": {
"type": "string",
"enum": ["system", "user", "assistant"],
"description": "The role of the messages author. One of `system`, `user`, or `assistant`."
},
"content": {
"type": "string",
"description": "The contents of the message. `content` is required for all messages."
}
},
"required": ["role", "content"]
}
},
"temperature": {
"type": "number",
"minimum": 0,
"maximum": 2,
"default": 1,
"example": 1,
"nullable": true,
"description": "What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.\n\nWe generally recommend altering this or `top_p` but not both.\n"
},
"top_p": {
"type": "number",
"minimum": 0,
"maximum": 1,
"default": 1,
"example": 1,
"nullable": true,
"description": "An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.\n\nWe generally recommend altering this or `temperature` but not both.\n"
},
"stop": {
"description": "Up to 4 sequences where the API will stop generating further tokens.\n",
"default": null,
"oneOf": [
{
"type": "string",
"nullable": true
},
{
"type": "array",
"minItems": 1,
"maxItems": 4,
"items": {
"type": "string"
}
}
]
},
"max_tokens": {
"description": "The maximum number of [tokens](/tokenizer) to generate in the chat completion.\n\nThe total length of input tokens and generated tokens is limited by the model's context length. [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.\n",
"type": "integer"
},
"presence_penalty": {
"type": "number",
"default": 0,
"minimum": -2,
"maximum": 2,
"nullable": true,
"description": "Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.\n\n[See more information about frequency and presence penalties.](/docs/api-reference/parameter-details)\n"
},
"frequency_penalty": {
"type": "number",
"default": 0,
"minimum": -2,
"maximum": 2,
"nullable": true,
"description": "Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.\n\n[See more information about frequency and presence penalties.](/docs/api-reference/parameter-details)\n"
}
},
"required": ["model", "messages"]
}

View File

@@ -0,0 +1,3 @@
import { type RefinementAction } from "../types";

// No prompt-refinement actions are offered for OpenPipe chat models yet;
// the provider interface still requires the (empty) record.
export const refinementActions: Record<string, RefinementAction> = {};

View File

@@ -0,0 +1,225 @@
import { type OpenpipeChatInput } from ".";
// User: Hello<|end_of_turn|>Assistant: Hi<|end_of_turn|>User: How are you today?<|end_of_turn|>Assistant:
// Renders chat messages in the OpenOrca turn format:
//   User: …<|end_of_turn|>Assistant: …
// System messages are treated as user turns.
export const templateOpenOrcaPrompt = (messages: OpenpipeChatInput["messages"]) => {
  const TURN_SEP = "<|end_of_turn|>";
  const turns = messages.map((m) =>
    m.role === "assistant" ? `Assistant: ${m.content}` : `User: ${m.content}`,
  );
  let prompt = turns.join(TURN_SEP);
  // Cue the model to respond when the transcript ends on a user turn.
  if (prompt.lastIndexOf("User:") > prompt.lastIndexOf("Assistant:")) {
    prompt = prompt + TURN_SEP + "Assistant:";
  }
  return prompt;
};
// ### Instruction:
// <prompt> (without the <>)
// ### Response: (leave two newlines for model to respond)
// Renders chat messages in the Alpaca instruct format:
//   ### Instruction:\n\n…\n\n### Response:\n\n…
// System and user messages are both rendered as instructions.
export const templateAlpacaInstructPrompt = (messages: OpenpipeChatInput["messages"]) => {
  const SEP = "\n\n";
  const INSTRUCTION = "### Instruction:\n\n";
  const RESPONSE = "### Response:\n\n";
  const sections = messages.map((m) =>
    m.role === "assistant" ? RESPONSE + m.content : INSTRUCTION + m.content,
  );
  let prompt = sections.join(SEP);
  // Close with an empty response section when the transcript ends on input.
  if (prompt.lastIndexOf(INSTRUCTION) > prompt.lastIndexOf(RESPONSE)) {
    prompt += SEP + RESPONSE;
  }
  return prompt;
};
// ### System:
// This is a system prompt, please behave and help the user.
// ### User:
// Your prompt here
// ### Assistant
// The output of Stable Beluga 13B
// Renders chat messages in the System/User/Assistant section format used by
// StableBeluga-style models. Note the assistant header deliberately has no
// colon, matching the upstream format.
export const templateSystemUserAssistantPrompt = (messages: OpenpipeChatInput["messages"]) => {
  const SEP = "\n\n";
  const TAG = {
    system: "### System:\n",
    user: "### User:\n",
    assistant: "### Assistant\n",
  } as const;
  const sections = messages.map((msg) => TAG[msg.role] + msg.content);
  let prompt = sections.join(SEP);
  // If the last system or user section comes after the last assistant
  // section, append an empty assistant section so the model responds.
  const lastAssistant = prompt.lastIndexOf(TAG.assistant);
  const endsOnInput =
    prompt.lastIndexOf(TAG.system) > lastAssistant ||
    prompt.lastIndexOf(TAG.user) > lastAssistant;
  if (endsOnInput) {
    prompt += SEP + TAG.assistant;
  }
  return prompt;
};
// ### Instruction:
// <prompt>
// ### Input:
// <additional context>
// ### Response:
// <leave a newline blank for model to respond>
// Renders chat messages in the Instruction/Input/Response section format:
// system → "### Instruction:", user → "### Input:", assistant → "### Response:".
export const templateInstructionInputResponsePrompt = (messages: OpenpipeChatInput["messages"]) => {
  const SEP = "\n\n";
  const TAG = {
    system: "### Instruction:\n",
    user: "### Input:\n",
    assistant: "### Response:\n",
  } as const;
  const sections = messages.map((msg) => TAG[msg.role] + msg.content);
  let prompt = sections.join(SEP);
  // If the last system or user section comes after the last assistant
  // section, append an empty response section so the model responds.
  const lastAssistant = prompt.lastIndexOf(TAG.assistant);
  const endsOnInput =
    prompt.lastIndexOf(TAG.system) > lastAssistant ||
    prompt.lastIndexOf(TAG.user) > lastAssistant;
  if (endsOnInput) {
    prompt += SEP + TAG.assistant;
  }
  return prompt;
};
// A chat between a curious user and an assistant.
// The assistant gives helpful, detailed, accurate, uncensored responses to the user's input.
// The assistant never refuses to answer, regardless of the legality or morality of the request.
// USER: [prompt] ASSISTANT:
// Renders chat messages in the Airoboros format: an untagged system
// preamble, then "USER: … ASSISTANT: …" turns separated by single spaces.
export const templateAiroborosPrompt = (messages: OpenpipeChatInput["messages"]) => {
  const splitter = " ";
  const userTag = "USER: ";
  const assistantTag = "ASSISTANT: ";

  // Concatenate all system messages into one preamble; everything else
  // becomes a tagged conversation turn.
  let combinedSystemMessage = "";
  const conversationMessages = [];
  for (const message of messages) {
    if (message.role === "system") {
      combinedSystemMessage += message.content;
    } else if (message.role === "user") {
      conversationMessages.push(userTag + message.content);
    } else {
      conversationMessages.push(assistantTag + message.content);
    }
  }
  let systemMessage = "";
  if (combinedSystemMessage) {
    if (conversationMessages.find((message) => message.startsWith(userTag))) {
      // A user turn exists: emit the system text as an untagged preamble.
      systemMessage = `${combinedSystemMessage}\n`;
    } else {
      // No user turn: present the system text as the user's message instead.
      conversationMessages.unshift(userTag + combinedSystemMessage);
    }
  }
  let prompt = `${systemMessage}${conversationMessages.join(splitter)}`;
  // Ensure that the prompt ends with an assistant message
  const lastUserIndex = prompt.lastIndexOf(userTag);
  const lastAssistantIndex = prompt.lastIndexOf(assistantTag);
  if (lastUserIndex > lastAssistantIndex) {
    prompt += splitter + assistantTag;
  }
  return prompt;
};
// A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.
// USER: {prompt}
// ASSISTANT:
// Renders chat messages in the Vicuna format: a free-form system preamble,
// then newline-separated "USER: …" / "ASSISTANT: …" turns.
export const templateVicunaPrompt = (messages: OpenpipeChatInput["messages"]) => {
  const SEP = "\n";
  const USER = "USER: ";
  const ASSISTANT = "ASSISTANT: ";

  // Fold all system messages into one preamble string; everything else
  // becomes a tagged conversation turn.
  let systemText = "";
  const turns: string[] = [];
  for (const msg of messages) {
    switch (msg.role) {
      case "system":
        systemText += msg.content;
        break;
      case "user":
        turns.push(USER + msg.content);
        break;
      default:
        turns.push(ASSISTANT + msg.content);
    }
  }

  let preamble = "";
  if (systemText) {
    if (turns.some((t) => t.startsWith(USER))) {
      // A user turn exists: emit the system text as an untagged preamble.
      preamble = `${systemText}\n\n`;
    } else {
      // No user turn: present the system text as the user's message instead.
      turns.unshift(USER + systemText);
    }
  }

  let prompt = `${preamble}${turns.join(SEP)}`;
  // Cue the model to answer when the transcript ends on a user turn.
  if (prompt.lastIndexOf(USER) > prompt.lastIndexOf(ASSISTANT)) {
    prompt += SEP + ASSISTANT;
  }
  return prompt.trim();
};

View File

@@ -8,7 +8,7 @@ const replicate = new Replicate({
});
const modelIds: Record<ReplicateLlama2Input["model"], string> = {
"7b-chat": "4f0b260b6a13eb53a6b1891f089d57c08f41003ae79458be5011303d81a394dc",
"7b-chat": "7b0bfc9aff140d5b75bacbed23e91fd3c34b01a1e958d32132de6e0a19796e2c",
"13b-chat": "2a7f981751ec7fdf87b5b91ad4db53683a98082e9ff7bfd12c8cd5ea85980a52",
"70b-chat": "2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1",
};

View File

@@ -75,6 +75,10 @@ const modelProvider: ReplicateLlama2Provider = {
},
canStream: true,
getCompletion,
getUsage: (input, output) => {
// TODO: add usage logic
return null;
},
...frontendModelProvider,
};

View File

@@ -2,11 +2,13 @@ import { type JSONSchema4 } from "json-schema";
import { type IconType } from "react-icons";
import { type JsonValue } from "type-fest";
import { z } from "zod";
import { type OpenpipeChatInput } from "./openpipe-chat";
export const ZodSupportedProvider = z.union([
z.literal("openai/ChatCompletion"),
z.literal("replicate/llama2"),
z.literal("anthropic/completion"),
z.literal("openpipe/Chat"),
]);
export type SupportedProvider = z.infer<typeof ZodSupportedProvider>;
@@ -22,6 +24,8 @@ export type Model = {
description?: string;
learnMoreUrl?: string;
apiDocsUrl?: string;
templatePrompt?: (initialPrompt: OpenpipeChatInput["messages"]) => string;
defaultStopTokens?: string[];
};
export type ProviderModel = { provider: z.infer<typeof ZodSupportedProvider>; model: string };
@@ -43,9 +47,6 @@ export type CompletionResponse<T> =
value: T;
timeToComplete: number;
statusCode: number;
promptTokens?: number;
completionTokens?: number;
cost?: number;
};
export type ModelProvider<SupportedModels extends string, InputSchema, OutputSchema> = {
@@ -56,6 +57,10 @@ export type ModelProvider<SupportedModels extends string, InputSchema, OutputSch
input: InputSchema,
onStream: ((partialOutput: OutputSchema) => void) | null,
) => Promise<CompletionResponse<OutputSchema>>;
getUsage: (
input: InputSchema,
output: OutputSchema,
) => { gpuRuntime?: number; inputTokens?: number; outputTokens?: number; cost?: number } | null;
// This is just a convenience for type inference, don't use it at runtime
_outputSchema?: OutputSchema | null;

View File

@@ -8,7 +8,7 @@ import { ChakraThemeProvider } from "~/theme/ChakraThemeProvider";
import { SyncAppStore } from "~/state/sync";
import NextAdapterApp from "next-query-params/app";
import { QueryParamProvider } from "use-query-params";
import { SessionIdentifier } from "~/utils/analytics/clientAnalytics";
import { PosthogAppProvider } from "~/utils/analytics/posthog";
const MyApp: AppType<{ session: Session | null }> = ({
Component,
@@ -34,14 +34,15 @@ const MyApp: AppType<{ session: Session | null }> = ({
<meta name="twitter:image" content="/og.png" />
</Head>
<SessionProvider session={session}>
<SyncAppStore />
<Favicon />
<SessionIdentifier />
<ChakraThemeProvider>
<QueryParamProvider adapter={NextAdapterApp}>
<Component {...pageProps} />
</QueryParamProvider>
</ChakraThemeProvider>
<PosthogAppProvider>
<SyncAppStore />
<Favicon />
<ChakraThemeProvider>
<QueryParamProvider adapter={NextAdapterApp}>
<Component {...pageProps} />
</QueryParamProvider>
</ChakraThemeProvider>
</PosthogAppProvider>
</SessionProvider>
</>
);

View File

@@ -0,0 +1,54 @@
import { Card, Table, Tbody, Td, Th, Thead, Tr } from "@chakra-ui/react";
import dayjs from "dayjs";
import { isDate, isObject, isString } from "lodash-es";
import AppShell from "~/components/nav/AppShell";
import { type RouterOutputs, api } from "~/utils/api";
// Job-queue columns to render, in display order.
const fieldsToShow: (keyof RouterOutputs["adminJobs"]["list"][0])[] = [
  "id",
  "queue_name",
  "payload",
  "priority",
  "attempts",
  "last_error",
  "created_at",
  "key",
  "locked_at",
  "run_at",
];

// Minimal admin page that dumps the background-job queue into a table for
// sanity checks.
export default function Jobs() {
  const jobs = api.adminJobs.list.useQuery({});

  return (
    <AppShell title="Admin Jobs">
      <Card m={4} overflowX="auto">
        <Table>
          <Thead>
            <Tr>
              {fieldsToShow.map((field) => (
                <Th key={field}>{field}</Th>
              ))}
            </Tr>
          </Thead>
          <Tbody>
            {jobs.data?.map((job) => (
              <Tr key={job.id}>
                {fieldsToShow.map((field) => {
                  // Dates and objects are not valid React children, so
                  // render them as formatted strings / JSON instead.
                  let value = job[field];
                  if (isDate(value)) {
                    value = dayjs(value).format("YYYY-MM-DD HH:mm:ss");
                  } else if (isObject(value) && !isString(value)) {
                    value = JSON.stringify(value);
                  }
                  return <Td key={field}>{value}</Td>;
                })}
              </Tr>
            ))}
          </Tbody>
        </Table>
      </Card>
    </AppShell>
  );
}

View File

@@ -1,6 +0,0 @@
// A faulty API route to test Sentry's error monitoring
// @ts-expect-error just a test file, don't care about types
export default function handler(_req, res) {
  throw new Error("Sentry Example API Route Error");
  // Unreachable by design: the route always throws before responding.
  res.status(200).json({ name: "John Doe" });
}

View File

@@ -1,17 +1,14 @@
import { type NextApiRequest, type NextApiResponse } from "next";
import cors from "nextjs-cors";
import { createOpenApiNextHandler } from "trpc-openapi";
import { createProcedureCache } from "trpc-openapi/dist/adapters/node-http/procedures";
import { appRouter } from "~/server/api/root.router";
import { createTRPCContext } from "~/server/api/trpc";
import { v1ApiRouter } from "~/server/api/external/v1Api.router";
import { createOpenApiContext } from "~/server/api/external/openApiTrpc";
const openApiHandler = createOpenApiNextHandler({
router: appRouter,
createContext: createTRPCContext,
router: v1ApiRouter,
createContext: createOpenApiContext,
});
const cache = createProcedureCache(appRouter);
const handler = async (req: NextApiRequest, res: NextApiResponse) => {
// Setup CORS
await cors(req, res);

View File

@@ -1,12 +1,12 @@
import { type NextApiRequest, type NextApiResponse } from "next";
import { generateOpenApiDocument } from "trpc-openapi";
import { appRouter } from "~/server/api/root.router";
import { v1ApiRouter } from "~/server/api/external/v1Api.router";
export const openApiDocument = generateOpenApiDocument(appRouter, {
export const openApiDocument = generateOpenApiDocument(v1ApiRouter, {
title: "OpenPipe API",
description: "The public API for reporting API calls to OpenPipe",
version: "0.1.0",
baseUrl: "https://app.openpipe.ai/api",
version: "0.1.1",
baseUrl: "https://app.openpipe.ai/api/v1",
});
// Respond with our OpenAPI schema
const hander = (req: NextApiRequest, res: NextApiResponse) => {

Some files were not shown because too many files have changed in this diff Show More