Compare commits
304 Commits
world-cham...main
Commits (SHA1):

5df898a4f6 a33f674ccd 0062952eb2 381604bc88 db69b8e496 88be0b07a9 ff621f2191 1e98972b6a
c5bca87486 fc1f15fee7 606a524c11 82b94657b1 0a642fac2a 4d90ff68c8 6ac554f7e1 422a6ff4c6
6153ebda41 b682bd6b78 43a22865fd 5b8113d8e7 96a589e401 16354d83df 6a5afd0c9b 1684663ddc
70fae68225 518c8620d0 ab87794192 48aa697002 38e28fa30a 55f2be861e fa87887e91 28713fb3ef
ead981b900 e0d0cc0df1 b4cb931f6c 7df1c59bd3 c83863f468 40638a7848 33ca98b267 39c943f2ec
14eae45d18 2aa4ac1594 13bac46e0b 42ade01f22 59b79049c1 0d7433cb7e 12d01cd3d5 ec59252010
87e2339df2 75ad6619a5 4b8941d53a 0d691d17cc 815d4faad2 9632ccbc71 a4131e4a10 db1c8f171d
678392ef17 af722128e8 50a79b6e3a f59150ff5b b58e0a8d54 dc82a3fa82 fedbf5784e 888c04af50
1b36453051 2f37b3ed87 8fa7b691db 17866a5249 947eba3216 ef1f9458f4 c6c7e746ee 3be0a90960
9b1f2ac30a 1b394cc72b 26b9731bab 7c8ec8f6a7 10dd53e7f6 b1802fc04b f2135ddc72 ca89eafb0b
b50d47beaf 733d53625b a5e59e4235 d0102e3202 bd571c4c4e 296eb23d97 4e2ae7a441 072dcee376
94464c0617 980644f13c 6a56250001 b1c7bbbd4a 3e20fa31ca 48a8e64be1 f3a5f11195 da5cbaf4dc
acf74909c9 edac8da4a8 687f3dd85f 0cef3ab5bd 756b3185de 3776ffc4c3 82549122e1 56a96a7db6
1596b15727 70d4a5bd9a c6ec901374 ad7665664a 108e3d1e85 76f600722a d9a0e4581f b9251ad93c
809ef04dc1 0fba2c9ee7 ac2ca0f617 73b9e40ced 3447e863cc 897e77b054 b22a4cd93b 3547c85c86
9636fa033e 890a738568 7003595e76 00df4453d3 4c325fc1cc dfee8a0ed7 0b4e116783 2bcb1d16a3
6e7efee21e bb9c3a9e61 11bfb5d5e4 b00ab933b3 8f4e7f7e2e 634739c045 9a9cbe8fd4 649dc3376b
05e774d021 0e328b13dc 0a18ca9cd6 a5fe35912e 3d3ddbe7a9 d8a5617dee 5da62fdc29 754e273049
2863dc2f89 c4cef35717 8552baf632 f41e2229ca e649f42c9c 99f305483b b28f4cad57 df4a3a0950
e423ad656a 7d0d94de3a 344b257db4 28b43b6e6d 8d373ec9b5 537525667d 519367c553 1a338ec863
01d0b8f778 d99836ec30 33751c12d2 89815e1f7f 5fa5109f34 b06ab2cbf9 35fb554038 f238177277
723c0f7505 ce6936f753 2a80cbf74a 098805ef25 ed90bc5a99 de9be8c7ce 3e02bcf9b8 cef2ee31fb
d7cff0f52e 228c547839 e1fcc8fb38 8ed47eb4dd 3a908d51aa d9db6d80ea 8d1ee62ff1 f270579283
81fbaeae44 5277afa199 76c34d64e6 454ac9a0d3 5ed7adadf9 b8e0f392ab b2af83341d e6d229d5f9
1a6ae3aef7 9051d80775 6c060c6ea0 f70e73e338 16aa6672fc ac99c8e0f7 df121db78c 816b41adad
f09dfe18be 868d7084f0 f6f04e537e 4feb3e5829 c62ced867a 7bb414026e 1b2b6c1456 760bfbbe32
3424aa36ba ded86cba08 65a0f9065f 2861c64428 ca33bb0b08 72e680e77c 5dd7e67396 fd286f6874
7a4aa5f0aa cb791e3c73 a2c322ff43 a2ace63f25 41d06596cb 49c68fdbf2 6188f55569 ea91d692d3
ae7acbfdd4 b9396e63cc 753a48f6e9 bd7c8b43b0 a1249f17c9 6f8db40f74 8c5345a291 f47010a6e7
6d32f1c06e 8fed9730da 0f9a83cf45 9f17d98736 74029e5478 d220cd30e8 c0f10cd522 dc497dbd99
f8f855adf4 8f49bace53 c9f59bfb79 57166e96b4 1a838824ae 6b304f8456 a53d70d8b2 109a9ddb1e
7f8b574c9f 9e859c199e deabbb094b 7637b94ea7 721f1726eb cfeb4dfa92 21ef67ed4c 7707d451e0
d82782adb4 e10589abff 01dcbfc896 50e0b34d30 44bb9fc58d c0d3784f0c e522026b71 46b13d85b7
c12aa82a3e b98bce8944 f045c80dfd 3b460dff2a 5fa5732804 28e6e2b9df 54d1df4442 f69c2b5f23
51f0666f6a b67d974f4c 33fb2db981 e391379c3e 8d1609dd52 f3380f302d 3dba9c7ee1 e0e4f7a9d6
48293dc579 38ac6243a0 bd2f58e2a5 808e47c6b9 5945f0ed6b 6bc7d76d15 e9ed173e34 75d58d7021
896c8c5c57 ec5547d0b0 77e4e3b8c3 a1b03ddad1 6be32bea4c 72c70e2a55 026532f2c2 f88538336f
3c7178115e 292aaf090a d9915dc41b 3560bcff14 6982339a1a d348b130d5 bf67580991 6184498810
.dockerignore (new file, +5)
@@ -0,0 +1,5 @@
**/node_modules/
.git
**/.venv/
**/.env*
**/.next/
.github/ISSUE_TEMPLATE/sweep-fast-template.yml (vendored, new file, +14)
@@ -0,0 +1,14 @@
name: Sweep Fast Issue
title: 'Sweep (fast): '
description: For few-line fixes to be handled by Sweep, an AI-powered junior developer. Sweep will use GPT-3.5 to quickly create a PR for very small changes.
labels: sweep
body:
  - type: textarea
    id: description
    attributes:
      label: Details
      description: Tell Sweep where and what to edit and provide enough context for a new developer to the codebase
      placeholder: |
        Bugs: The bug might be in ... file. Here are the logs: ...
        Features: the new endpoint should use the ... class from ... file because it contains ... logic.
        Refactors: We are migrating this function to ... version because ...
.github/ISSUE_TEMPLATE/sweep-slow-template.yml (vendored, new file, +14)
@@ -0,0 +1,14 @@
name: Sweep Slow Issue
title: 'Sweep (slow): '
description: For larger bugs, features, refactors, and tests to be handled by Sweep, an AI-powered junior developer. Sweep will perform a deeper search and more self-reviews but will take longer.
labels: sweep
body:
  - type: textarea
    id: description
    attributes:
      label: Details
      description: Tell Sweep where and what to edit and provide enough context for a new developer to the codebase
      placeholder: |
        Bugs: The bug might be in ... file. Here are the logs: ...
        Features: the new endpoint should use the ... class from ... file because it contains ... logic.
        Refactors: We are migrating this function to ... version because ...
.github/ISSUE_TEMPLATE/sweep-template.yml (vendored, new file, +14)
@@ -0,0 +1,14 @@
name: Sweep Issue
title: 'Sweep: '
description: For small bugs, features, refactors, and tests to be handled by Sweep, an AI-powered junior developer.
labels: sweep
body:
  - type: textarea
    id: description
    attributes:
      label: Details
      description: Tell Sweep where and what to edit and provide enough context for a new developer to the codebase
      placeholder: |
        Bugs: The bug might be in ... file. Here are the logs: ...
        Features: the new endpoint should use the ... class from ... file because it contains ... logic.
        Refactors: We are migrating this function to ... version because ...
@@ -6,6 +6,10 @@ on:
  push:
    branches: [main]

defaults:
  run:
    working-directory: app

jobs:
  run-checks:
    runs-on: ubuntu-latest
.gitignore (vendored, 44 changes)
@@ -1,42 +1,6 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# database
/prisma/db.sqlite
/prisma/db.sqlite-journal

# next.js
/.next/
/out/
next-env.d.ts

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# local env files
# do not commit any .env files to git, except for the .env.example file. https://create.t3.gg/en/usage/env-variables#using-environment-variables
.env
.env*.local

# vercel
.vercel

# typescript
.venv/
*.pyc
node_modules/
*.tsbuildinfo
dist/
@@ -1,2 +1,2 @@
*.schema.json
pnpm-lock.yaml
app/pnpm-lock.yaml
Dockerfile (deleted, -42)
@@ -1,42 +0,0 @@
# Adapted from https://create.t3.gg/en/deployment/docker#3-create-dockerfile

FROM node:20.1.0-bullseye as base
RUN yarn global add pnpm

# DEPS
FROM base as deps

WORKDIR /app

COPY prisma ./

COPY package.json pnpm-lock.yaml ./

RUN pnpm install --frozen-lockfile

# BUILDER
FROM base as builder

# Include all NEXT_PUBLIC_* env vars here
ARG NEXT_PUBLIC_POSTHOG_KEY
ARG NEXT_PUBLIC_SOCKET_URL

WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
COPY . .
RUN SKIP_ENV_VALIDATION=1 pnpm build

# RUNNER
FROM base as runner
WORKDIR /app

ENV NODE_ENV production
ENV NEXT_TELEMETRY_DISABLED 1

COPY --from=builder /app/ ./

EXPOSE 3000
ENV PORT 3000

# Run the "run-prod.sh" script
CMD /app/run-prod.sh
README.md (110 changes)
@@ -1,51 +1,80 @@
<img src="https://github.com/openpipe/openpipe/assets/41524992/ca59596e-eb80-40f9-921f-6d67f6e6d8fa" width="72px" />
<p align="center">
  <a href="https://openpipe.ai">
    <img height="70" src="https://github.com/openpipe/openpipe/assets/41524992/70af25fb-1f90-42d9-8a20-3606e3b5aaba" alt="logo">
  </a>
</p>
<h1 align="center">
  OpenPipe
</h1>

# OpenPipe
<p align="center">
  <i>Turn expensive prompts into cheap fine-tuned models.</i>
</p>

<p align="center">
  <a href="/LICENSE"><img alt="License Apache-2.0" src="https://img.shields.io/github/license/openpipe/openpipe?style=flat-square"></a>
  <a href='http://makeapullrequest.com'><img alt='PRs Welcome' src='https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square'/></a>
  <a href="https://github.com/openpipe/openpipe/graphs/commit-activity"><img alt="GitHub commit activity" src="https://img.shields.io/github/commit-activity/m/openpipe/openpipe?style=flat-square"/></a>
  <a href="https://github.com/openpipe/openpipe/issues"><img alt="GitHub closed issues" src="https://img.shields.io/github/issues-closed/openpipe/openpipe?style=flat-square"/></a>
  <img src="https://img.shields.io/badge/Y%20Combinator-S23-orange?style=flat-square" alt="Y Combinator S23">
</p>

<p align="center">
  <a href="https://app.openpipe.ai/">Hosted App</a> - <a href="#running-locally">Running Locally</a> - <a href="#sample-experiments">Experiments</a>
</p>

<br>
Use powerful but expensive LLMs to fine-tune smaller and cheaper models suited to your exact needs. Evaluate model and prompt combinations in the playground. Query your past requests and export optimized training data. Try it out at https://app.openpipe.ai or <a href="#running-locally">run it locally</a>.
<br>

## Features

* <b>Experiment</b>
  * Bulk-test wide-reaching scenarios using code templating.
  * Seamlessly translate prompts across different model APIs.
  * Tap into autogenerated scenarios for fresh test perspectives.

* <b>Fine-Tune (Beta)</b>
  * Easy integration with OpenPipe's SDK in both Python and JS.
  * Swiftly query logs using intuitive built-in filters.
  * Export data in multiple training formats, including Alpaca and ChatGPT, with deduplication.

<img src="https://github.com/openpipe/openpipe/assets/41524992/eaa8b92d-4536-4f63-bbef-4b0b1a60f6b5" alt="fine-tune demo">

<!-- <img height="400px" src="https://github.com/openpipe/openpipe/assets/41524992/66bb1843-cb72-4130-a369-eec2df3b8201" alt="playground demo"> -->

OpenPipe is a flexible playground for comparing and optimizing LLM prompts. It lets you quickly generate, test and compare candidate prompts with realistic sample data.

## Sample Experiments

These are simple experiments users have created that show how OpenPipe works.
These are sample experiments users have created that show how OpenPipe works. Feel free to fork them and start experimenting yourself.

- [Country Capitals](https://app.openpipe.ai/experiments/11111111-1111-1111-1111-111111111111)
- [Twitter Sentiment Analysis](https://app.openpipe.ai/experiments/62c20a73-2012-4a64-973c-4b665ad46a57)
- [Reddit User Needs](https://app.openpipe.ai/experiments/22222222-2222-2222-2222-222222222222)
- [OpenAI Function Calls](https://app.openpipe.ai/experiments/2ebbdcb3-ed51-456e-87dc-91f72eaf3e2b)
- [Activity Classification](https://app.openpipe.ai/experiments/3950940f-ab6b-4b74-841d-7e9dbc4e4ff8)

<img src="https://github.com/openpipe/openpipe/assets/176426/fc7624c6-5b65-4d4d-82b7-4a816f3e5678" alt="demo" height="400px">

You can use our hosted version of OpenPipe at [https://openpipe.ai]. You can also clone this repository and [run it locally](#running-locally).

## High-Level Features

**Configure Multiple Prompts**
Set up multiple prompt configurations and compare their output side-by-side. Each configuration can be configured independently.

**Visualize Responses**
Inspect prompt completions side-by-side.

**Test Many Inputs**
OpenPipe lets you _template_ a prompt. Use the templating feature to run the prompts you're testing against many potential inputs for broader coverage of your problem space than you'd get with manual testing.

**🪄 Auto-generate Test Scenarios**
OpenPipe includes a tool to generate new test scenarios based on your existing prompts and scenarios. Just click "Autogenerate Scenario" to try it out!

**Prompt Validation and Typeahead**
We use OpenAI's OpenAPI spec to automatically provide typeahead and validate prompts.

<img alt="typeahead" src="https://github.com/openpipe/openpipe/assets/176426/acc638f8-d851-4742-8d01-fe6f98890840" height="300px">

**Function Call Support**
Natively supports [OpenAI function calls](https://openai.com/blog/function-calling-and-other-api-updates) on supported models.

<img height="300px" alt="function calls" src="https://github.com/openpipe/openpipe/assets/176426/48ad13fe-af2f-4294-bf32-62015597fd9b">

## Supported Models

- All models available through the OpenAI [chat completion API](https://platform.openai.com/docs/guides/gpt/chat-completions-api)
- Llama2 [7b chat](https://replicate.com/a16z-infra/llama7b-v2-chat), [13b chat](https://replicate.com/a16z-infra/llama13b-v2-chat), [70b chat](https://replicate.com/replicate/llama70b-v2-chat).
- Anthropic's [Claude 1 Instant](https://www.anthropic.com/index/introducing-claude) and [Claude 2](https://www.anthropic.com/index/claude-2)

#### OpenAI
- [GPT 3.5 Turbo](https://platform.openai.com/docs/guides/gpt/chat-completions-api)
- [GPT 3.5 Turbo 16k](https://platform.openai.com/docs/guides/gpt/chat-completions-api)
- [GPT 4](https://openai.com/gpt-4)

#### Llama2
- [7b chat](https://replicate.com/a16z-infra/llama7b-v2-chat)
- [13b chat](https://replicate.com/a16z-infra/llama13b-v2-chat)
- [70b chat](https://replicate.com/replicate/llama70b-v2-chat)

#### Llama2 Fine-Tunes
- [Open-Orca/OpenOrcaxOpenChat-Preview2-13B](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B)
- [Open-Orca/OpenOrca-Platypus2-13B](https://huggingface.co/Open-Orca/OpenOrca-Platypus2-13B)
- [NousResearch/Nous-Hermes-Llama2-13b](https://huggingface.co/NousResearch/Nous-Hermes-Llama2-13b)
- [jondurbin/airoboros-l2-13b-gpt4-2.0](https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-2.0)
- [lmsys/vicuna-13b-v1.5](https://huggingface.co/lmsys/vicuna-13b-v1.5)
- [Gryphe/MythoMax-L2-13b](https://huggingface.co/Gryphe/MythoMax-L2-13b)
- [NousResearch/Nous-Hermes-llama-2-7b](https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b)

#### Anthropic
- [Claude 1 Instant](https://www.anthropic.com/index/introducing-claude)
- [Claude 2](https://www.anthropic.com/index/claude-2)

## Running Locally

@@ -55,7 +84,14 @@ Natively supports [OpenAI function calls](https://openai.com/blog/function-calli
4. Clone this repository: `git clone https://github.com/openpipe/openpipe`
5. Install the dependencies: `cd openpipe && pnpm install`
6. Create a `.env` file (`cp .env.example .env`) and enter your `OPENAI_API_KEY`.
7. Update `DATABASE_URL` if necessary to point to your Postgres instance and run `pnpm prisma db push` to create the database.
7. Update `DATABASE_URL` if necessary to point to your Postgres instance and run `pnpm prisma migrate dev` to create the database.
8. Create a [GitHub OAuth App](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/creating-an-oauth-app) and update the `GITHUB_CLIENT_ID` and `GITHUB_CLIENT_SECRET` values. (Note: a PR to make auth optional when running locally would be a great contribution!)
9. Start the app: `pnpm dev`.
10. Navigate to [http://localhost:3000](http://localhost:3000)

## Testing Locally

1. Copy your `.env` file to `.env.test`.
2. Update the `DATABASE_URL` to have a different database name than your development one
3. Run `DATABASE_URL=[your new datatase url] pnpm prisma migrate dev --skip-seed --skip-generate`
4. Run `pnpm test`
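Taken together, the "Running Locally" and "Testing Locally" steps above reduce to a short command sequence. This is a minimal sketch, assuming Node 20, pnpm, and a Postgres instance are already available and that the OpenAI and GitHub OAuth values in `.env` are filled in by hand:

```sh
# Sketch of the setup steps above (not an official script)
git clone https://github.com/openpipe/openpipe
cd openpipe && pnpm install
cp .env.example .env        # then set OPENAI_API_KEY, GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET
pnpm prisma migrate dev     # creates the database pointed to by DATABASE_URL
pnpm dev                    # app at http://localhost:3000

# Testing locally
cp .env .env.test           # point DATABASE_URL at a separate test database
DATABASE_URL=<your test database url> pnpm prisma migrate dev --skip-seed --skip-generate
pnpm test
```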
@@ -26,6 +26,22 @@ NEXT_PUBLIC_SOCKET_URL="http://localhost:3318"
NEXTAUTH_SECRET="your_secret"
NEXTAUTH_URL="http://localhost:3000"

NEXT_PUBLIC_HOST="http://localhost:3000"

# Next Auth Github Provider
GITHUB_CLIENT_ID="your_client_id"
GITHUB_CLIENT_SECRET="your_secret"

OPENPIPE_BASE_URL="http://localhost:3000/api/v1"
OPENPIPE_API_KEY="your_key"

SENDER_EMAIL="placeholder"
SMTP_HOST="placeholder"
SMTP_PORT="placeholder"
SMTP_LOGIN="placeholder"
SMTP_PASSWORD="placeholder"

# Azure credentials are necessary for uploading large training data files
AZURE_STORAGE_ACCOUNT_NAME="placeholder"
AZURE_STORAGE_ACCOUNT_KEY="placeholder"
AZURE_STORAGE_CONTAINER_NAME="placeholder"
@@ -6,7 +6,7 @@ const config = {
  overrides: [
    {
      extends: ["plugin:@typescript-eslint/recommended-requiring-type-checking"],
      files: ["*.ts", "*.tsx"],
      files: ["*.mts", "*.ts", "*.tsx"],
      parserOptions: {
        project: path.join(__dirname, "tsconfig.json"),
      },
app/.gitignore (vendored, new file, +53)
@@ -0,0 +1,53 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# database
/prisma/db.sqlite
/prisma/db.sqlite-journal

# next.js
/.next/
/out/
next-env.d.ts

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# local env files
# do not commit any .env files to git, except for the .env.example file. https://create.t3.gg/en/usage/env-variables#using-environment-variables
.env
.env*.local
.env.test

# vercel
.vercel

# typescript
*.tsbuildinfo

# Sentry Auth Token
.sentryclirc

# custom openai intialization
src/server/utils/openaiCustomConfig.json

# yalc
.yalc
yalc.lock
@@ -12,12 +12,23 @@ declare module "nextjs-routes" {

  export type Route =
    | StaticRoute<"/account/signin">
    | StaticRoute<"/admin/jobs">
    | DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
    | StaticRoute<"/api/experiments/og-image">
    | DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>
    | DynamicRoute<"/experiments/[id]", { "id": string }>
    | DynamicRoute<"/api/v1/[...trpc]", { "trpc": string[] }>
    | StaticRoute<"/api/v1/openapi">
    | StaticRoute<"/dashboard">
    | DynamicRoute<"/datasets/[id]", { "id": string }>
    | StaticRoute<"/datasets">
    | DynamicRoute<"/experiments/[experimentSlug]", { "experimentSlug": string }>
    | StaticRoute<"/experiments">
    | StaticRoute<"/fine-tunes">
    | StaticRoute<"/">
    | DynamicRoute<"/invitations/[invitationToken]", { "invitationToken": string }>
    | StaticRoute<"/project/settings">
    | StaticRoute<"/request-logs">
    | StaticRoute<"/sentry-example-page">
    | StaticRoute<"/world-champs">
    | StaticRoute<"/world-champs/signup">;
app/Dockerfile (new file, +47)
@@ -0,0 +1,47 @@
# Adapted from https://create.t3.gg/en/deployment/docker#3-create-dockerfile

FROM node:20.1.0-bullseye as base
RUN yarn global add pnpm

# DEPS
FROM base as deps

WORKDIR /code

COPY app/prisma app/package.json ./app/
COPY client-libs/typescript/package.json ./client-libs/typescript/
COPY pnpm-lock.yaml pnpm-workspace.yaml ./

RUN cd app && pnpm install --frozen-lockfile

# BUILDER
FROM base as builder

# Include all NEXT_PUBLIC_* env vars here
ARG NEXT_PUBLIC_POSTHOG_KEY
ARG NEXT_PUBLIC_SOCKET_URL
ARG NEXT_PUBLIC_HOST
ARG NEXT_PUBLIC_SENTRY_DSN
ARG SENTRY_AUTH_TOKEN

WORKDIR /code
COPY --from=deps /code/node_modules ./node_modules
COPY --from=deps /code/app/node_modules ./app/node_modules
COPY --from=deps /code/client-libs/typescript/node_modules ./client-libs/typescript/node_modules
COPY . .
RUN cd app && SKIP_ENV_VALIDATION=1 pnpm build

# RUNNER
FROM base as runner
WORKDIR /code/app

ENV NODE_ENV production
ENV NEXT_TELEMETRY_DISABLED 1

COPY --from=builder /code/ /code/

EXPOSE 3000
ENV PORT 3000

# Run the "run-prod.sh" script
CMD /code/app/scripts/run-prod.sh
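Because the `NEXT_PUBLIC_*` values are declared as build args here, they have to be supplied when the image is built (they get baked into the client bundle). A minimal sketch of building this image from the repository root; the image tag and the environment variables holding the values are assumptions, not part of the diff:

```sh
# Sketch only: tag name and variable sources are placeholders
docker build -f app/Dockerfile \
  --build-arg NEXT_PUBLIC_POSTHOG_KEY="$NEXT_PUBLIC_POSTHOG_KEY" \
  --build-arg NEXT_PUBLIC_SOCKET_URL="$NEXT_PUBLIC_SOCKET_URL" \
  --build-arg NEXT_PUBLIC_HOST="$NEXT_PUBLIC_HOST" \
  --build-arg NEXT_PUBLIC_SENTRY_DSN="$NEXT_PUBLIC_SENTRY_DSN" \
  --build-arg SENTRY_AUTH_TOKEN="$SENTRY_AUTH_TOKEN" \
  -t openpipe-app .
```

The build context must be the repo root (not `app/`) because the Dockerfile copies `pnpm-lock.yaml`, `pnpm-workspace.yaml`, and `client-libs/` from outside the app directory.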
app/next.config.mjs (new file, +63)
@@ -0,0 +1,63 @@
import nextRoutes from "nextjs-routes/config";
import { withSentryConfig } from "@sentry/nextjs";

/**
 * Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation. This is especially useful
 * for Docker builds.
 */
const { env } = await import("./src/env.mjs");

/** @type {import("next").NextConfig} */
let config = {
  reactStrictMode: true,

  /**
   * If you have `experimental: { appDir: true }` set, then you must comment the below `i18n` config
   * out.
   *
   * @see https://github.com/vercel/next.js/issues/41980
   */
  i18n: {
    locales: ["en"],
    defaultLocale: "en",
  },

  rewrites: async () => [
    {
      source: "/ingest/:path*",
      destination: "https://app.posthog.com/:path*",
    },
  ],

  webpack: (config) => {
    config.module.rules.push({
      test: /\.txt$/,
      use: "raw-loader",
    });
    return config;
  },

  transpilePackages: ["openpipe"],
};

config = nextRoutes()(config);

if (env.NEXT_PUBLIC_SENTRY_DSN && env.SENTRY_AUTH_TOKEN) {
  // @ts-expect-error - `withSentryConfig` is not typed correctly
  config = withSentryConfig(
    config,
    {
      authToken: env.SENTRY_AUTH_TOKEN,
      silent: true,
      org: "openpipe",
      project: "openpipe",
    },
    {
      widenClientFileUpload: true,
      tunnelRoute: "/monitoring",
      disableLogger: true,
    },
  );
}

export default config;
app/openapitools.json (new file, +7)
@@ -0,0 +1,7 @@
{
  "$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json",
  "spaces": 2,
  "generator-cli": {
    "version": "6.6.0"
  }
}
@@ -1,5 +1,6 @@
{
  "name": "openpipe",
  "name": "openpipe-app",
  "private": true,
  "type": "module",
  "version": "0.1.0",
  "license": "Apache-2.0",
@@ -9,24 +10,29 @@
  },
  "scripts": {
    "build": "next build",
    "dev:next": "next dev",
    "dev:next": "TZ=UTC next dev",
    "dev:wss": "pnpm tsx --watch src/wss-server.ts",
    "dev:worker": "NODE_ENV='development' pnpm tsx --watch src/server/tasks/worker.ts",
    "dev": "concurrently --kill-others 'pnpm dev:next' 'pnpm dev:wss' 'pnpm dev:worker'",
    "worker": "NODE_ENV='development' pnpm tsx --watch src/server/tasks/worker.ts",
    "dev": "concurrently --kill-others 'pnpm dev:next' 'pnpm dev:wss' 'pnpm worker --watch'",
    "postinstall": "prisma generate",
    "lint": "next lint",
    "start": "next start",
    "codegen": "tsx src/codegen/export-openai-types.ts",
    "start": "TZ=UTC next start",
    "codegen:clients": "tsx src/server/scripts/client-codegen.ts",
    "codegen:db": "prisma generate && kysely-codegen --dialect postgres --out-file src/server/db.types.ts",
    "seed": "tsx prisma/seed.ts",
    "check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'"
    "check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'",
    "test": "pnpm vitest"
  },
  "dependencies": {
    "@anthropic-ai/sdk": "^0.5.8",
    "@apidevtools/json-schema-ref-parser": "^10.1.0",
    "@babel/preset-typescript": "^7.22.5",
    "@azure/identity": "^3.3.0",
    "@azure/storage-blob": "12.15.0",
    "@babel/standalone": "^7.22.9",
    "@chakra-ui/anatomy": "^2.2.0",
    "@chakra-ui/next-js": "^2.1.4",
    "@chakra-ui/react": "^2.7.1",
    "@chakra-ui/styled-system": "^2.9.1",
    "@emotion/react": "^11.11.1",
    "@emotion/server": "^11.11.0",
    "@emotion/styled": "^11.11.0",
@@ -34,6 +40,8 @@
|
||||
"@monaco-editor/loader": "^1.3.3",
|
||||
"@next-auth/prisma-adapter": "^1.0.5",
|
||||
"@prisma/client": "^4.14.0",
|
||||
"@sendinblue/client": "^3.3.1",
|
||||
"@sentry/nextjs": "^7.61.0",
|
||||
"@t3-oss/env-nextjs": "^0.3.1",
|
||||
"@tabler/icons-react": "^2.22.0",
|
||||
"@tanstack/react-query": "^4.29.7",
|
||||
@@ -42,10 +50,12 @@
|
||||
"@trpc/react-query": "^10.26.0",
|
||||
"@trpc/server": "^10.26.0",
|
||||
"@vercel/og": "^0.5.9",
|
||||
"archiver": "^6.0.0",
|
||||
"ast-types": "^0.14.2",
|
||||
"chroma-js": "^2.4.2",
|
||||
"concurrently": "^8.2.0",
|
||||
"cors": "^2.8.5",
|
||||
"crypto-random-string": "^5.0.0",
|
||||
"dayjs": "^1.11.8",
|
||||
"dedent": "^1.0.1",
|
||||
"dotenv": "^16.3.1",
|
||||
@@ -53,33 +63,50 @@
|
||||
"framer-motion": "^10.12.17",
|
||||
"gpt-tokens": "^1.0.10",
|
||||
"graphile-worker": "^0.13.0",
|
||||
"human-id": "^4.0.0",
|
||||
"immer": "^10.0.2",
|
||||
"isolated-vm": "^4.5.0",
|
||||
"json-schema-to-typescript": "^13.0.2",
|
||||
"json-stringify-pretty-compact": "^4.0.0",
|
||||
"jsonschema": "^1.4.1",
|
||||
"kysely": "^0.26.1",
|
||||
"kysely-codegen": "^0.10.1",
|
||||
"llama-tokenizer-js": "^1.1.3",
|
||||
"lodash-es": "^4.17.21",
|
||||
"lucide-react": "^0.265.0",
|
||||
"marked": "^7.0.3",
|
||||
"next": "^13.4.2",
|
||||
"next-auth": "^4.22.1",
|
||||
"next-query-params": "^4.2.3",
|
||||
"nextjs-cors": "^2.1.2",
|
||||
"nextjs-routes": "^2.0.1",
|
||||
"openai": "4.0.0-beta.2",
|
||||
"nodemailer": "^6.9.4",
|
||||
"openai": "4.0.0-beta.7",
|
||||
"openpipe": "0.4.0-beta.1",
|
||||
"openpipe-dev": "workspace:^",
|
||||
"pg": "^8.11.2",
|
||||
"pluralize": "^8.0.0",
|
||||
"posthog-js": "^1.68.4",
|
||||
"posthog-js": "^1.75.3",
|
||||
"posthog-node": "^3.1.1",
|
||||
"prettier": "^3.0.0",
|
||||
"prismjs": "^1.29.0",
|
||||
"react": "18.2.0",
|
||||
"react-diff-viewer": "^3.1.1",
|
||||
"react-dom": "18.2.0",
|
||||
"react-github-btn": "^1.4.0",
|
||||
"react-icons": "^4.10.1",
|
||||
"react-json-tree": "^0.18.0",
|
||||
"react-select": "^5.7.4",
|
||||
"react-syntax-highlighter": "^15.5.0",
|
||||
"react-textarea-autosize": "^8.5.0",
|
||||
"recast": "^0.23.3",
|
||||
"recharts": "^2.7.2",
|
||||
"replicate": "^0.12.3",
|
||||
"socket.io": "^4.7.1",
|
||||
"socket.io-client": "^4.7.1",
|
||||
"stream-buffers": "^3.0.2",
|
||||
"superjson": "1.12.2",
|
||||
"trpc-openapi": "^1.2.0",
|
||||
"tsx": "^3.12.7",
|
||||
"type-fest": "^4.0.0",
|
||||
"use-query-params": "^2.2.1",
|
||||
@@ -90,6 +117,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@openapi-contrib/openapi-schema-to-json-schema": "^4.0.5",
|
||||
"@types/archiver": "^5.3.2",
|
||||
"@types/babel__core": "^7.20.1",
|
||||
"@types/babel__standalone": "^7.1.4",
|
||||
"@types/chroma-js": "^2.4.0",
|
||||
@@ -99,11 +127,14 @@
|
||||
"@types/json-schema": "^7.0.12",
|
||||
"@types/lodash-es": "^4.17.8",
|
||||
"@types/node": "^18.16.0",
|
||||
"@types/nodemailer": "^6.4.9",
|
||||
"@types/pg": "^8.10.2",
|
||||
"@types/pluralize": "^0.0.30",
|
||||
"@types/prismjs": "^1.26.0",
|
||||
"@types/react": "^18.2.6",
|
||||
"@types/react-dom": "^18.2.4",
|
||||
"@types/react-syntax-highlighter": "^15.5.7",
|
||||
"@types/stream-buffers": "^3.0.4",
|
||||
"@types/uuid": "^9.0.2",
|
||||
"@typescript-eslint/eslint-plugin": "^5.59.6",
|
||||
"@typescript-eslint/parser": "^5.59.6",
|
||||
@@ -113,6 +144,7 @@
|
||||
"eslint-plugin-unused-imports": "^2.0.0",
|
||||
"monaco-editor": "^0.40.0",
|
||||
"openapi-typescript": "^6.3.4",
|
||||
"openapi-typescript-codegen": "^0.25.0",
|
||||
"prisma": "^4.14.0",
|
||||
"raw-loader": "^4.0.2",
|
||||
"typescript": "^5.0.4",
|
||||
app/prisma/deleteOneFineTune.ts (new file, +12)
@@ -0,0 +1,12 @@
import { prisma } from "~/server/db";

// delete most recent fineTune
const mostRecentFineTune = await prisma.fineTune.findFirst({
  orderBy: { createdAt: "desc" },
});

if (mostRecentFineTune) {
  await prisma.fineTune.delete({
    where: { id: mostRecentFineTune.id },
  });
}
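The diff adds no package.json script for this helper; presumably it is run directly with `tsx`, which the app already uses for its other standalone scripts. A sketch, assuming it is invoked from the `app/` directory:

```sh
# Sketch: run the one-off cleanup script with the tsx runner already in the app's dependencies
pnpm tsx prisma/deleteOneFineTune.ts
```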
@@ -0,0 +1,5 @@
-- CreateEnum
CREATE TYPE "UserRole" AS ENUM ('ADMIN', 'USER');

-- AlterTable
ALTER TABLE "User" ADD COLUMN "role" "UserRole" NOT NULL DEFAULT 'USER';
@@ -0,0 +1,28 @@
-- CreateTable
CREATE TABLE "Dataset" (
    "id" UUID NOT NULL,
    "name" TEXT NOT NULL,
    "organizationId" UUID NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "Dataset_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "DatasetEntry" (
    "id" UUID NOT NULL,
    "input" TEXT NOT NULL,
    "output" TEXT,
    "datasetId" UUID NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "DatasetEntry_pkey" PRIMARY KEY ("id")
);

-- AddForeignKey
ALTER TABLE "Dataset" ADD CONSTRAINT "Dataset_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "DatasetEntry" ADD CONSTRAINT "DatasetEntry_datasetId_fkey" FOREIGN KEY ("datasetId") REFERENCES "Dataset"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -0,0 +1,13 @@
/*
  Warnings:

  - You are about to drop the column `constructFn` on the `PromptVariant` table. All the data in the column will be lost.
  - You are about to drop the column `constructFnVersion` on the `PromptVariant` table. All the data in the column will be lost.
  - Added the required column `promptConstructor` to the `PromptVariant` table without a default value. This is not possible if the table is not empty.
  - Added the required column `promptConstructorVersion` to the `PromptVariant` table without a default value. This is not possible if the table is not empty.

*/
-- AlterTable

ALTER TABLE "PromptVariant" RENAME COLUMN "constructFn" TO "promptConstructor";
ALTER TABLE "PromptVariant" RENAME COLUMN "constructFnVersion" TO "promptConstructorVersion";
@@ -0,0 +1,90 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "LoggedCall" (
|
||||
"id" UUID NOT NULL,
|
||||
"startTime" TIMESTAMP(3) NOT NULL,
|
||||
"cacheHit" BOOLEAN NOT NULL,
|
||||
"modelResponseId" UUID NOT NULL,
|
||||
"organizationId" UUID NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "LoggedCall_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "LoggedCallModelResponse" (
|
||||
"id" UUID NOT NULL,
|
||||
"reqPayload" JSONB NOT NULL,
|
||||
"respStatus" INTEGER,
|
||||
"respPayload" JSONB,
|
||||
"error" TEXT,
|
||||
"startTime" TIMESTAMP(3) NOT NULL,
|
||||
"endTime" TIMESTAMP(3) NOT NULL,
|
||||
"cacheKey" TEXT,
|
||||
"durationMs" INTEGER,
|
||||
"inputTokens" INTEGER,
|
||||
"outputTokens" INTEGER,
|
||||
"finishReason" TEXT,
|
||||
"completionId" TEXT,
|
||||
"totalCost" DECIMAL(18,12),
|
||||
"originalLoggedCallId" UUID NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "LoggedCallModelResponse_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "LoggedCallTag" (
|
||||
"id" UUID NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT,
|
||||
"loggedCallId" UUID NOT NULL,
|
||||
|
||||
CONSTRAINT "LoggedCallTag_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "ApiKey" (
|
||||
"id" UUID NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"apiKey" TEXT NOT NULL,
|
||||
"organizationId" UUID NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "ApiKey_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "LoggedCall_startTime_idx" ON "LoggedCall"("startTime");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "LoggedCallModelResponse_originalLoggedCallId_key" ON "LoggedCallModelResponse"("originalLoggedCallId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "LoggedCallModelResponse_cacheKey_idx" ON "LoggedCallModelResponse"("cacheKey");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "LoggedCallTag_name_idx" ON "LoggedCallTag"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "LoggedCallTag_name_value_idx" ON "LoggedCallTag"("name", "value");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApiKey_apiKey_key" ON "ApiKey"("apiKey");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "LoggedCall" ADD CONSTRAINT "LoggedCall_modelResponseId_fkey" FOREIGN KEY ("modelResponseId") REFERENCES "LoggedCallModelResponse"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "LoggedCall" ADD CONSTRAINT "LoggedCall_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "LoggedCallModelResponse" ADD CONSTRAINT "LoggedCallModelResponse_originalLoggedCallId_fkey" FOREIGN KEY ("originalLoggedCallId") REFERENCES "LoggedCall"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "LoggedCallTag" ADD CONSTRAINT "LoggedCallTag_loggedCallId_fkey" FOREIGN KEY ("loggedCallId") REFERENCES "LoggedCall"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "ApiKey" ADD CONSTRAINT "ApiKey_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Organization" ADD COLUMN "name" TEXT NOT NULL DEFAULT 'Project 1';
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "LoggedCall" ALTER COLUMN "modelResponseId" DROP NOT NULL;
@@ -0,0 +1,37 @@
|
||||
-- Rename Enum
|
||||
ALTER TYPE "OrganizationUserRole" RENAME TO "ProjectUserRole";
|
||||
|
||||
-- Drop and recreate foreign keys
|
||||
ALTER TABLE "ApiKey" DROP CONSTRAINT "ApiKey_organizationId_fkey";
|
||||
ALTER TABLE "Dataset" DROP CONSTRAINT "Dataset_organizationId_fkey";
|
||||
ALTER TABLE "Experiment" DROP CONSTRAINT "Experiment_organizationId_fkey";
|
||||
ALTER TABLE "LoggedCall" DROP CONSTRAINT "LoggedCall_organizationId_fkey";
|
||||
ALTER TABLE "OrganizationUser" DROP CONSTRAINT "OrganizationUser_organizationId_fkey";
|
||||
ALTER TABLE "OrganizationUser" DROP CONSTRAINT "OrganizationUser_userId_fkey";
|
||||
|
||||
-- Rename columns
|
||||
ALTER TABLE "ApiKey" RENAME COLUMN "organizationId" TO "projectId";
|
||||
ALTER TABLE "Dataset" RENAME COLUMN "organizationId" TO "projectId";
|
||||
ALTER TABLE "Experiment" RENAME COLUMN "organizationId" TO "projectId";
|
||||
ALTER TABLE "LoggedCall" RENAME COLUMN "organizationId" TO "projectId";
|
||||
ALTER TABLE "OrganizationUser" RENAME COLUMN "organizationId" TO "projectId";
|
||||
ALTER TABLE "Organization" RENAME COLUMN "personalOrgUserId" TO "personalProjectUserId";
|
||||
|
||||
-- Rename table
|
||||
ALTER TABLE "Organization" RENAME TO "Project";
|
||||
ALTER TABLE "OrganizationUser" RENAME TO "ProjectUser";
|
||||
|
||||
-- Recreate foreign keys
|
||||
ALTER TABLE "Experiment" ADD CONSTRAINT "Experiment_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
ALTER TABLE "Dataset" ADD CONSTRAINT "Dataset_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
ALTER TABLE "ProjectUser" ADD CONSTRAINT "ProjectUser_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
ALTER TABLE "ProjectUser" ADD CONSTRAINT "ProjectUser_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
ALTER TABLE "LoggedCall" ADD CONSTRAINT "LoggedCall_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
ALTER TABLE "ApiKey" ADD CONSTRAINT "ApiKey_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- Rename indexes
|
||||
ALTER TABLE "Project" RENAME CONSTRAINT "Organization_pkey" TO "Project_pkey";
|
||||
ALTER TABLE "ProjectUser" RENAME CONSTRAINT "OrganizationUser_pkey" TO "ProjectUser_pkey";
|
||||
ALTER TABLE "Project" RENAME CONSTRAINT "Organization_personalOrgUserId_fkey" TO "Project_personalProjectUserId_fkey";
|
||||
ALTER INDEX "Organization_personalOrgUserId_key" RENAME TO "Project_personalProjectUserId_key";
|
||||
ALTER INDEX "OrganizationUser_organizationId_userId_key" RENAME TO "ProjectUser_projectId_userId_key";
|
||||
@@ -0,0 +1 @@
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
@@ -0,0 +1,66 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- You are about to rename the column `completionTokens` to `outputTokens` on the `ModelResponse` table.
|
||||
- You are about to rename the column `promptTokens` to `inputTokens` on the `ModelResponse` table.
|
||||
- You are about to rename the column `startTime` on the `LoggedCall` table to `requestedAt`. Ensure compatibility with application logic.
|
||||
- You are about to rename the column `startTime` on the `LoggedCallModelResponse` table to `requestedAt`. Ensure compatibility with application logic.
|
||||
- You are about to rename the column `endTime` on the `LoggedCallModelResponse` table to `receivedAt`. Ensure compatibility with application logic.
|
||||
- You are about to rename the column `error` on the `LoggedCallModelResponse` table to `errorMessage`. Ensure compatibility with application logic.
|
||||
- You are about to rename the column `respStatus` on the `LoggedCallModelResponse` table to `statusCode`. Ensure compatibility with application logic.
|
||||
- You are about to rename the column `totalCost` on the `LoggedCallModelResponse` table to `cost`. Ensure compatibility with application logic.
|
||||
- You are about to rename the column `inputHash` on the `ModelResponse` table to `cacheKey`. Ensure compatibility with application logic.
|
||||
- You are about to rename the column `output` on the `ModelResponse` table to `respPayload`. Ensure compatibility with application logic.
|
||||
|
||||
*/
|
||||
-- DropIndex
|
||||
DROP INDEX "LoggedCall_startTime_idx";
|
||||
|
||||
-- DropIndex
|
||||
DROP INDEX "ModelResponse_inputHash_idx";
|
||||
|
||||
-- Rename completionTokens to outputTokens
|
||||
ALTER TABLE "ModelResponse"
|
||||
RENAME COLUMN "completionTokens" TO "outputTokens";
|
||||
|
||||
-- Rename promptTokens to inputTokens
|
||||
ALTER TABLE "ModelResponse"
|
||||
RENAME COLUMN "promptTokens" TO "inputTokens";
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "LoggedCall"
|
||||
RENAME COLUMN "startTime" TO "requestedAt";
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "LoggedCallModelResponse"
|
||||
RENAME COLUMN "startTime" TO "requestedAt";
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "LoggedCallModelResponse"
|
||||
RENAME COLUMN "endTime" TO "receivedAt";
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "LoggedCallModelResponse"
|
||||
RENAME COLUMN "error" TO "errorMessage";
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "LoggedCallModelResponse"
|
||||
RENAME COLUMN "respStatus" TO "statusCode";
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "LoggedCallModelResponse"
|
||||
RENAME COLUMN "totalCost" TO "cost";
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "ModelResponse"
|
||||
RENAME COLUMN "inputHash" TO "cacheKey";
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "ModelResponse"
|
||||
RENAME COLUMN "output" TO "respPayload";
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "LoggedCall_requestedAt_idx" ON "LoggedCall"("requestedAt");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "ModelResponse_cacheKey_idx" ON "ModelResponse"("cacheKey");
|
||||
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "LoggedCall" ADD COLUMN "model" TEXT;
@@ -0,0 +1,22 @@
-- DropIndex
DROP INDEX "LoggedCallTag_name_idx";
DROP INDEX "LoggedCallTag_name_value_idx";

-- AlterTable: Add projectId column without NOT NULL constraint for now
ALTER TABLE "LoggedCallTag" ADD COLUMN "projectId" UUID;

-- Set the default value
UPDATE "LoggedCallTag" lct
SET "projectId" = lc."projectId"
FROM "LoggedCall" lc
WHERE lct."loggedCallId" = lc.id;

-- Now set the NOT NULL constraint
ALTER TABLE "LoggedCallTag" ALTER COLUMN "projectId" SET NOT NULL;

-- CreateIndex
CREATE INDEX "LoggedCallTag_projectId_name_idx" ON "LoggedCallTag"("projectId", "name");
CREATE INDEX "LoggedCallTag_projectId_name_value_idx" ON "LoggedCallTag"("projectId", "name", "value");

-- CreateIndex
CREATE UNIQUE INDEX "LoggedCallTag_loggedCallId_name_key" ON "LoggedCallTag"("loggedCallId", "name");
@@ -0,0 +1,25 @@
-- CreateTable
CREATE TABLE "UserInvitation" (
    "id" UUID NOT NULL,
    "projectId" UUID NOT NULL,
    "email" TEXT NOT NULL,
    "role" "ProjectUserRole" NOT NULL,
    "invitationToken" TEXT NOT NULL,
    "senderId" UUID NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "UserInvitation_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "UserInvitation_invitationToken_key" ON "UserInvitation"("invitationToken");

-- CreateIndex
CREATE UNIQUE INDEX "UserInvitation_projectId_email_key" ON "UserInvitation"("projectId", "email");

-- AddForeignKey
ALTER TABLE "UserInvitation" ADD CONSTRAINT "UserInvitation_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "UserInvitation" ADD CONSTRAINT "UserInvitation_senderId_fkey" FOREIGN KEY ("senderId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -0,0 +1,88 @@
|
||||
/*
|
||||
* Copyright 2023 Viascom Ltd liab. Co
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
CREATE EXTENSION IF NOT EXISTS pgcrypto;
|
||||
|
||||
CREATE OR REPLACE FUNCTION nanoid(
|
||||
size int DEFAULT 21,
|
||||
alphabet text DEFAULT '_-0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
|
||||
)
|
||||
RETURNS text
|
||||
LANGUAGE plpgsql
|
||||
volatile
|
||||
AS
|
||||
$$
|
||||
DECLARE
|
||||
idBuilder text := '';
|
||||
counter int := 0;
|
||||
bytes bytea;
|
||||
alphabetIndex int;
|
||||
alphabetArray text[];
|
||||
alphabetLength int;
|
||||
mask int;
|
||||
step int;
|
||||
BEGIN
|
||||
alphabetArray := regexp_split_to_array(alphabet, '');
|
||||
alphabetLength := array_length(alphabetArray, 1);
|
||||
mask := (2 << cast(floor(log(alphabetLength - 1) / log(2)) as int)) - 1;
|
||||
step := cast(ceil(1.6 * mask * size / alphabetLength) AS int);
|
||||
|
||||
while true
|
||||
loop
|
||||
bytes := gen_random_bytes(step);
|
||||
while counter < step
|
||||
loop
|
||||
alphabetIndex := (get_byte(bytes, counter) & mask) + 1;
|
||||
if alphabetIndex <= alphabetLength then
|
||||
idBuilder := idBuilder || alphabetArray[alphabetIndex];
|
||||
if length(idBuilder) = size then
|
||||
return idBuilder;
|
||||
end if;
|
||||
end if;
|
||||
counter := counter + 1;
|
||||
end loop;
|
||||
|
||||
counter := 0;
|
||||
end loop;
|
||||
END
|
||||
$$;
|
||||
|
||||
|
||||
-- Make a short_nanoid function that uses the default alphabet and length of 15
|
||||
CREATE OR REPLACE FUNCTION short_nanoid()
|
||||
RETURNS text
|
||||
LANGUAGE plpgsql
|
||||
volatile
|
||||
AS
|
||||
$$
|
||||
BEGIN
|
||||
RETURN nanoid(15, '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ');
|
||||
END
|
||||
$$;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "Experiment" ADD COLUMN "slug" TEXT NOT NULL DEFAULT short_nanoid();
|
||||
|
||||
-- For existing experiments, keep the existing id as the slug for backwards compatibility
|
||||
UPDATE "Experiment" SET "slug" = "id";
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Experiment_slug_key" ON "Experiment"("slug");
|
||||
@@ -0,0 +1,48 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- You are about to drop the column `input` on the `DatasetEntry` table. All the data in the column will be lost.
|
||||
- You are about to drop the column `output` on the `DatasetEntry` table. All the data in the column will be lost.
|
||||
- Added the required column `loggedCallId` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.
|
||||
|
||||
*/
|
||||
-- AlterTable
|
||||
ALTER TABLE "DatasetEntry" DROP COLUMN "input",
|
||||
DROP COLUMN "output",
|
||||
ADD COLUMN "loggedCallId" UUID NOT NULL;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "DatasetEntry" ADD CONSTRAINT "DatasetEntry_loggedCallId_fkey" FOREIGN KEY ("loggedCallId") REFERENCES "LoggedCall"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "LoggedCallModelResponse" ALTER COLUMN "cost" SET DATA TYPE DOUBLE PRECISION;
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "FineTuneStatus" AS ENUM ('PENDING', 'TRAINING', 'AWAITING_DEPLOYMENT', 'DEPLOYING', 'DEPLOYED', 'ERROR');
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "FineTune" (
|
||||
"id" UUID NOT NULL,
|
||||
"slug" TEXT NOT NULL,
|
||||
"baseModel" TEXT NOT NULL,
|
||||
"status" "FineTuneStatus" NOT NULL DEFAULT 'PENDING',
|
||||
"trainingStartedAt" TIMESTAMP(3),
|
||||
"trainingFinishedAt" TIMESTAMP(3),
|
||||
"deploymentStartedAt" TIMESTAMP(3),
|
||||
"deploymentFinishedAt" TIMESTAMP(3),
|
||||
"datasetId" UUID NOT NULL,
|
||||
"projectId" UUID NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "FineTune_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "FineTune_slug_key" ON "FineTune"("slug");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "FineTune" ADD CONSTRAINT "FineTune_datasetId_fkey" FOREIGN KEY ("datasetId") REFERENCES "Dataset"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "FineTune" ADD CONSTRAINT "FineTune_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,26 @@
/*
  Warnings:

  - Added the required column `inputTokens` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.
  - Added the required column `outputTokens` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.
  - Added the required column `type` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.

*/
-- CreateEnum
CREATE TYPE "DatasetEntryType" AS ENUM ('TRAIN', 'TEST');

-- AlterTable
ALTER TABLE "Dataset" ADD COLUMN "trainingRatio" DOUBLE PRECISION NOT NULL DEFAULT 0.8;

-- AlterTable
ALTER TABLE "DatasetEntry" ADD COLUMN "input" JSONB NOT NULL DEFAULT '[]',
ADD COLUMN "inputTokens" INTEGER NOT NULL DEFAULT 0,
ADD COLUMN "output" JSONB,
ADD COLUMN "outputTokens" INTEGER NOT NULL DEFAULT 0,
ADD COLUMN "type" "DatasetEntryType" NOT NULL DEFAULT 'TRAIN';

-- CreateIndex
CREATE INDEX "DatasetEntry_datasetId_createdAt_id_idx" ON "DatasetEntry"("datasetId", "createdAt", "id");

-- CreateIndex
CREATE INDEX "DatasetEntry_datasetId_type_idx" ON "DatasetEntry"("datasetId", "type");
@@ -0,0 +1,5 @@
-- AlterTable
ALTER TABLE "DatasetEntry" ALTER COLUMN "loggedCallId" DROP NOT NULL,
ALTER COLUMN "inputTokens" DROP DEFAULT,
ALTER COLUMN "outputTokens" DROP DEFAULT,
ALTER COLUMN "type" DROP DEFAULT;
@@ -0,0 +1,23 @@
-- CreateEnum
CREATE TYPE "DatasetFileUploadStatus" AS ENUM ('PENDING', 'DOWNLOADING', 'PROCESSING', 'SAVING', 'COMPLETE', 'ERROR');

-- CreateTable
CREATE TABLE "DatasetFileUpload" (
    "id" UUID NOT NULL,
    "datasetId" UUID NOT NULL,
    "blobName" TEXT NOT NULL,
    "fileName" TEXT NOT NULL,
    "fileSize" INTEGER NOT NULL,
    "progress" INTEGER NOT NULL DEFAULT 0,
    "status" "DatasetFileUploadStatus" NOT NULL DEFAULT 'PENDING',
    "uploadedAt" TIMESTAMP(3) NOT NULL,
    "visible" BOOLEAN NOT NULL DEFAULT true,
    "errorMessage" TEXT,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "DatasetFileUpload_pkey" PRIMARY KEY ("id")
);

-- AddForeignKey
ALTER TABLE "DatasetFileUpload" ADD CONSTRAINT "DatasetFileUpload_datasetId_fkey" FOREIGN KEY ("datasetId") REFERENCES "Dataset"("id") ON DELETE CASCADE ON UPDATE CASCADE;
app/prisma/schema.prisma (new file, +507)
@@ -0,0 +1,507 @@
|
||||
// This is your Prisma schema file,
|
||||
// learn more about it in the docs: https://pris.ly/d/prisma-schema
|
||||
|
||||
generator client {
|
||||
provider = "prisma-client-js"
|
||||
}
|
||||
|
||||
datasource db {
|
||||
provider = "postgresql"
|
||||
url = env("DATABASE_URL")
|
||||
}
|
||||
|
||||
model Experiment {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
slug String @unique @default(dbgenerated("short_nanoid()"))
|
||||
label String
|
||||
|
||||
sortIndex Int @default(0)
|
||||
|
||||
projectId String @db.Uuid
|
||||
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
templateVariables TemplateVariable[]
|
||||
promptVariants PromptVariant[]
|
||||
testScenarios TestScenario[]
|
||||
evaluations Evaluation[]
|
||||
}
|
||||
|
||||
model PromptVariant {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
label String
|
||||
promptConstructor String
|
||||
promptConstructorVersion Int
|
||||
model String
|
||||
modelProvider String
|
||||
|
||||
uiId String @default(uuid()) @db.Uuid
|
||||
visible Boolean @default(true)
|
||||
sortIndex Int @default(0)
|
||||
|
||||
experimentId String @db.Uuid
|
||||
experiment Experiment @relation(fields: [experimentId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
scenarioVariantCells ScenarioVariantCell[]
|
||||
|
||||
@@index([uiId])
|
||||
}
|
||||
|
||||
model TestScenario {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
variableValues Json
|
||||
|
||||
uiId String @default(uuid()) @db.Uuid
|
||||
visible Boolean @default(true)
|
||||
sortIndex Int @default(0)
|
||||
|
||||
experimentId String @db.Uuid
|
||||
experiment Experiment @relation(fields: [experimentId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
scenarioVariantCells ScenarioVariantCell[]
|
||||
}
|
||||
|
||||
model TemplateVariable {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
label String
|
||||
|
||||
experimentId String @db.Uuid
|
||||
experiment Experiment @relation(fields: [experimentId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
enum CellRetrievalStatus {
|
||||
PENDING
|
||||
IN_PROGRESS
|
||||
COMPLETE
|
||||
ERROR
|
||||
}
|
||||
|
||||
model ScenarioVariantCell {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
retrievalStatus CellRetrievalStatus @default(COMPLETE)
|
||||
jobQueuedAt DateTime?
|
||||
jobStartedAt DateTime?
|
||||
modelResponses ModelResponse[]
|
||||
errorMessage String? // Contains errors that occurred independently of model responses
|
||||
|
||||
promptVariantId String @db.Uuid
|
||||
promptVariant PromptVariant @relation(fields: [promptVariantId], references: [id], onDelete: Cascade)
|
||||
prompt Json?
|
||||
|
||||
testScenarioId String @db.Uuid
|
||||
testScenario TestScenario @relation(fields: [testScenarioId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@unique([promptVariantId, testScenarioId])
|
||||
}
|
||||
|
||||
model ModelResponse {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
cacheKey String
|
||||
requestedAt DateTime?
|
||||
receivedAt DateTime?
|
||||
respPayload Json?
|
||||
cost Float?
|
||||
inputTokens Int?
|
||||
outputTokens Int?
|
||||
statusCode Int?
|
||||
errorMessage String?
|
||||
retryTime DateTime?
|
||||
outdated Boolean @default(false)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
scenarioVariantCellId String @db.Uuid
|
||||
scenarioVariantCell ScenarioVariantCell @relation(fields: [scenarioVariantCellId], references: [id], onDelete: Cascade)
|
||||
outputEvaluations OutputEvaluation[]
|
||||
|
||||
@@index([cacheKey])
|
||||
}
|
||||
|
||||
enum EvalType {
|
||||
CONTAINS
|
||||
DOES_NOT_CONTAIN
|
||||
GPT4_EVAL
|
||||
}
|
||||
|
||||
model Evaluation {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
label String
|
||||
evalType EvalType
|
||||
value String
|
||||
|
||||
experimentId String @db.Uuid
|
||||
experiment Experiment @relation(fields: [experimentId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
outputEvaluations OutputEvaluation[]
|
||||
}
|
||||
|
||||
model OutputEvaluation {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
// Number between 0 (fail) and 1 (pass)
|
||||
result Float
|
||||
details String?
|
||||
|
||||
modelResponseId String @db.Uuid
|
||||
modelResponse ModelResponse @relation(fields: [modelResponseId], references: [id], onDelete: Cascade)
|
||||
|
||||
evaluationId String @db.Uuid
|
||||
evaluation Evaluation @relation(fields: [evaluationId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@unique([modelResponseId, evaluationId])
|
||||
}
|
||||
|
||||
|
||||
enum DatasetFileUploadStatus {
|
||||
PENDING
|
||||
DOWNLOADING
|
||||
PROCESSING
|
||||
SAVING
|
||||
COMPLETE
|
||||
ERROR
|
||||
}
|
||||
|
||||
model DatasetFileUpload {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
datasetId String @db.Uuid
|
||||
dataset Dataset @relation(fields: [datasetId], references: [id], onDelete: Cascade)
|
||||
blobName String
|
||||
fileName String
|
||||
fileSize Int
|
||||
progress Int @default(0) // Percentage
|
||||
status DatasetFileUploadStatus @default(PENDING)
|
||||
uploadedAt DateTime
|
||||
visible Boolean @default(true)
|
||||
errorMessage String?
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model Dataset {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
name String
|
||||
datasetEntries DatasetEntry[]
|
||||
fineTunes FineTune[]
|
||||
datasetFileUploads DatasetFileUpload[]
|
||||
trainingRatio Float @default(0.8)
|
||||
|
||||
projectId String @db.Uuid
|
||||
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
enum DatasetEntryType {
|
||||
TRAIN
|
||||
TEST
|
||||
}
|
||||
|
||||
model DatasetEntry {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
loggedCallId String? @db.Uuid
|
||||
loggedCall LoggedCall? @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)
|
||||
|
||||
input Json @default("[]")
|
||||
output Json?
|
||||
inputTokens Int
|
||||
outputTokens Int
|
||||
|
||||
type DatasetEntryType
|
||||
|
||||
datasetId String @db.Uuid
|
||||
dataset Dataset? @relation(fields: [datasetId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@index([datasetId, createdAt, id])
|
||||
@@index([datasetId, type])
|
||||
}
|
||||
|
||||
model Project {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
name String @default("Project 1")
|
||||
|
||||
personalProjectUserId String? @unique @db.Uuid
|
||||
personalProjectUser User? @relation(fields: [personalProjectUserId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
projectUsers ProjectUser[]
|
||||
projectUserInvitations UserInvitation[]
|
||||
experiments Experiment[]
|
||||
datasets Dataset[]
|
||||
loggedCalls LoggedCall[]
|
||||
fineTunes FineTune[]
|
||||
apiKeys ApiKey[]
|
||||
}
|
||||
|
||||
enum ProjectUserRole {
|
||||
ADMIN
|
||||
MEMBER
|
||||
VIEWER
|
||||
}
|
||||
|
||||
model ProjectUser {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
role ProjectUserRole
|
||||
|
||||
projectId String @db.Uuid
|
||||
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
|
||||
userId String @db.Uuid
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@unique([projectId, userId])
|
||||
}
|
||||
|
||||
model WorldChampEntrant {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
userId String @db.Uuid
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
approved Boolean @default(false)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@unique([userId])
|
||||
}
|
||||
|
||||
model LoggedCall {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
requestedAt DateTime
|
||||
|
||||
// True if this call was served from the cache, false otherwise
|
||||
cacheHit Boolean
|
||||
|
||||
// A LoggedCall is always associated with a LoggedCallModelResponse. If this
|
||||
// is a cache miss, we create a new LoggedCallModelResponse.
|
||||
// If it's a cache hit, it's a pre-existing LoggedCallModelResponse.
|
||||
modelResponseId String? @db.Uuid
|
||||
modelResponse LoggedCallModelResponse? @relation(fields: [modelResponseId], references: [id], onDelete: Cascade)
|
||||
|
||||
// The responses created by this LoggedCall. Will be empty if this LoggedCall was a cache hit.
|
||||
createdResponses LoggedCallModelResponse[] @relation(name: "ModelResponseOriginalCall")
|
||||
|
||||
projectId String @db.Uuid
|
||||
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
|
||||
model String?
|
||||
tags LoggedCallTag[]
|
||||
datasetEntries DatasetEntry[]
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@index([requestedAt])
|
||||
}
|
||||
|
||||
model LoggedCallModelResponse {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
reqPayload Json
|
||||
|
||||
// The HTTP status returned by the model provider
|
||||
statusCode Int?
|
||||
respPayload Json?
|
||||
|
||||
// Should be null if the request was successful, and some string if the request failed.
|
||||
errorMessage String?
|
||||
|
||||
requestedAt DateTime
|
||||
receivedAt DateTime
|
||||
|
||||
// Note: the function to calculate the cacheKey should include the project
|
||||
// ID so we don't share cached responses between projects, which could be an
|
||||
// attack vector. Also, we should only set the cacheKey on the model if the
|
||||
// request was successful.
|
||||
cacheKey String?
|
||||
|
||||
// Derived fields
|
||||
durationMs Int?
|
||||
inputTokens Int?
|
||||
outputTokens Int?
|
||||
finishReason String?
|
||||
completionId String?
|
||||
cost Float?
|
||||
|
||||
// The LoggedCall that created this LoggedCallModelResponse
|
||||
originalLoggedCallId String @unique @db.Uuid
|
||||
originalLoggedCall LoggedCall @relation(name: "ModelResponseOriginalCall", fields: [originalLoggedCallId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
loggedCalls LoggedCall[]
|
||||
|
||||
@@index([cacheKey])
|
||||
}
|
||||
|
||||
model LoggedCallTag {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
name String
|
||||
value String?
|
||||
projectId String @db.Uuid
|
||||
|
||||
loggedCallId String @db.Uuid
|
||||
loggedCall LoggedCall @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([loggedCallId, name])
|
||||
@@index([projectId, name])
|
||||
@@index([projectId, name, value])
|
||||
}
|
||||
|
||||
model ApiKey {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
name String
|
||||
apiKey String @unique
|
||||
|
||||
projectId String @db.Uuid
|
||||
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model Account {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
userId String @db.Uuid
|
||||
type String
|
||||
provider String
|
||||
providerAccountId String
|
||||
refresh_token String? @db.Text
|
||||
refresh_token_expires_in Int?
|
||||
access_token String? @db.Text
|
||||
expires_at Int?
|
||||
token_type String?
|
||||
scope String?
|
||||
id_token String? @db.Text
|
||||
session_state String?
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([provider, providerAccountId])
|
||||
}
|
||||
|
||||
model Session {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
sessionToken String @unique
|
||||
userId String @db.Uuid
|
||||
expires DateTime
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
}
|
||||
|
||||
enum UserRole {
|
||||
ADMIN
|
||||
USER
|
||||
}
|
||||
|
||||
model User {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
name String?
|
||||
email String? @unique
|
||||
emailVerified DateTime?
|
||||
image String?
|
||||
|
||||
role UserRole @default(USER)
|
||||
|
||||
accounts Account[]
|
||||
sessions Session[]
|
||||
projectUsers ProjectUser[]
|
||||
projects Project[]
|
||||
worldChampEntrant WorldChampEntrant?
|
||||
sentUserInvitations UserInvitation[]
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @default(now()) @updatedAt
|
||||
}
|
||||
|
||||
model UserInvitation {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
projectId String @db.Uuid
|
||||
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
email String
|
||||
role ProjectUserRole
|
||||
invitationToken String @unique
|
||||
senderId String @db.Uuid
|
||||
sender User @relation(fields: [senderId], references: [id], onDelete: Cascade)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@unique([projectId, email])
|
||||
}
|
||||
|
||||
model VerificationToken {
|
||||
identifier String
|
||||
token String @unique
|
||||
expires DateTime
|
||||
|
||||
@@unique([identifier, token])
|
||||
}
|
||||
|
||||
enum FineTuneStatus {
|
||||
PENDING
|
||||
TRAINING
|
||||
AWAITING_DEPLOYMENT
|
||||
DEPLOYING
|
||||
DEPLOYED
|
||||
ERROR
|
||||
}
|
||||
|
||||
model FineTune {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
slug String @unique
|
||||
baseModel String
|
||||
status FineTuneStatus @default(PENDING)
|
||||
trainingStartedAt DateTime?
|
||||
trainingFinishedAt DateTime?
|
||||
deploymentStartedAt DateTime?
|
||||
deploymentFinishedAt DateTime?
|
||||
|
||||
datasetId String @db.Uuid
|
||||
dataset Dataset @relation(fields: [datasetId], references: [id], onDelete: Cascade)
|
||||
|
||||
projectId String @db.Uuid
|
||||
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
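The schema above ties `Dataset`, `DatasetEntry`, and `FineTune` together through `projectId`/`datasetId` relations, splits entries into `TRAIN`/`TEST` via the `type` enum, and defines compound indexes on `DatasetEntry`. A minimal sketch of queries that line up with those indexes, assuming the generated Prisma client and the `prisma` instance exported from `~/server/db` (as used by the seed scripts in this diff); the function names are illustrative:

```ts
import { prisma } from "~/server/db";

// Count TRAIN vs TEST entries for a dataset; served by the
// @@index([datasetId, type]) defined on DatasetEntry above.
async function datasetSplit(datasetId: string) {
  const [train, test] = await Promise.all([
    prisma.datasetEntry.count({ where: { datasetId, type: "TRAIN" } }),
    prisma.datasetEntry.count({ where: { datasetId, type: "TEST" } }),
  ]);
  return { train, test };
}

// Page through entries in insertion order; matches the
// @@index([datasetId, createdAt, id]) compound index.
async function listEntries(datasetId: string, take = 50) {
  return prisma.datasetEntry.findMany({
    where: { datasetId },
    orderBy: [{ createdAt: "asc" }, { id: "asc" }],
    take,
  });
}
```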
@@ -1,20 +1,44 @@
|
||||
import { prisma } from "~/server/db";
|
||||
import dedent from "dedent";
|
||||
import { generateNewCell } from "~/server/utils/generateNewCell";
|
||||
import { promptConstructorVersion } from "~/promptConstructor/version";
|
||||
import { env } from "~/env.mjs";
|
||||
|
||||
const defaultId = "11111111-1111-1111-1111-111111111111";
|
||||
|
||||
await prisma.organization.deleteMany({
|
||||
await prisma.project.deleteMany({
|
||||
where: { id: defaultId },
|
||||
});
|
||||
|
||||
// If there's an existing org, just seed into it
|
||||
const org =
|
||||
(await prisma.organization.findFirst({})) ??
|
||||
(await prisma.organization.create({
|
||||
// Mark all users as admins
|
||||
await prisma.user.updateMany({
|
||||
where: {},
|
||||
data: {
|
||||
role: "ADMIN",
|
||||
},
|
||||
});
|
||||
|
||||
// If there's an existing project, just seed into it
|
||||
const project =
|
||||
(await prisma.project.findFirst({})) ??
|
||||
(await prisma.project.create({
|
||||
data: { id: defaultId },
|
||||
}));
|
||||
|
||||
if (env.OPENPIPE_API_KEY) {
|
||||
await prisma.apiKey.upsert({
|
||||
where: {
|
||||
apiKey: env.OPENPIPE_API_KEY,
|
||||
},
|
||||
create: {
|
||||
projectId: project.id,
|
||||
name: "Default API Key",
|
||||
apiKey: env.OPENPIPE_API_KEY,
|
||||
},
|
||||
update: {},
|
||||
});
|
||||
}
|
||||
|
||||
await prisma.experiment.deleteMany({
|
||||
where: {
|
||||
id: defaultId,
|
||||
@@ -25,7 +49,7 @@ await prisma.experiment.create({
|
||||
data: {
|
||||
id: defaultId,
|
||||
label: "Country Capitals Example",
|
||||
organizationId: org.id,
|
||||
projectId: project.id,
|
||||
},
|
||||
});
|
||||
|
||||
@@ -51,8 +75,8 @@ await prisma.promptVariant.createMany({
|
||||
sortIndex: 0,
|
||||
model: "gpt-3.5-turbo-0613",
|
||||
modelProvider: "openai/ChatCompletion",
|
||||
constructFnVersion: 1,
|
||||
constructFn: dedent`
|
||||
promptConstructorVersion,
|
||||
promptConstructor: dedent`
|
||||
definePrompt("openai/ChatCompletion", {
|
||||
model: "gpt-3.5-turbo-0613",
|
||||
messages: [
|
||||
@@ -70,8 +94,8 @@ await prisma.promptVariant.createMany({
|
||||
sortIndex: 1,
|
||||
model: "gpt-3.5-turbo-0613",
|
||||
modelProvider: "openai/ChatCompletion",
|
||||
constructFnVersion: 1,
|
||||
constructFn: dedent`
|
||||
promptConstructorVersion,
|
||||
promptConstructor: dedent`
|
||||
definePrompt("openai/ChatCompletion", {
|
||||
model: "gpt-3.5-turbo-0613",
|
||||
messages: [
|
||||
@@ -1,19 +1,19 @@
|
||||
import { prisma } from "~/server/db";
|
||||
import { generateNewCell } from "~/server/utils/generateNewCell";
|
||||
import dedent from "dedent";
|
||||
import { execSync } from "child_process";
|
||||
import fs from "fs";
|
||||
import { promptConstructorVersion } from "~/promptConstructor/version";
|
||||
|
||||
const defaultId = "11111111-1111-1111-1111-111111111112";
|
||||
|
||||
await prisma.organization.deleteMany({
|
||||
await prisma.project.deleteMany({
|
||||
where: { id: defaultId },
|
||||
});
|
||||
|
||||
// If there's an existing org, just seed into it
|
||||
const org =
|
||||
(await prisma.organization.findFirst({})) ??
|
||||
(await prisma.organization.create({
|
||||
// If there's an existing project, just seed into it
|
||||
const project =
|
||||
(await prisma.project.findFirst({})) ??
|
||||
(await prisma.project.create({
|
||||
data: { id: defaultId },
|
||||
}));
|
||||
|
||||
@@ -46,7 +46,7 @@ for (const dataset of datasets) {
|
||||
const oldExperiment = await prisma.experiment.findFirst({
|
||||
where: {
|
||||
label: experimentName,
|
||||
organizationId: org.id,
|
||||
projectId: project.id,
|
||||
},
|
||||
});
|
||||
if (oldExperiment) {
|
||||
@@ -59,7 +59,7 @@ for (const dataset of datasets) {
|
||||
data: {
|
||||
id: oldExperiment?.id ?? undefined,
|
||||
label: experimentName,
|
||||
organizationId: org.id,
|
||||
projectId: project.id,
|
||||
},
|
||||
});
|
||||
|
||||
@@ -98,8 +98,8 @@ for (const dataset of datasets) {
|
||||
sortIndex: 0,
|
||||
model: "gpt-3.5-turbo-0613",
|
||||
modelProvider: "openai/ChatCompletion",
|
||||
constructFnVersion: 1,
|
||||
constructFn: dedent`
|
||||
promptConstructorVersion,
|
||||
promptConstructor: dedent`
|
||||
definePrompt("openai/ChatCompletion", {
|
||||
model: "gpt-3.5-turbo-0613",
|
||||
messages: [
|
||||
421
app/prisma/seedDashboard.ts
Normal file
@@ -2,17 +2,18 @@ import { prisma } from "~/server/db";
|
||||
import dedent from "dedent";
|
||||
import fs from "fs";
|
||||
import { parse } from "csv-parse/sync";
|
||||
import { promptConstructorVersion } from "~/promptConstructor/version";
|
||||
|
||||
const defaultId = "11111111-1111-1111-1111-111111111112";
|
||||
|
||||
await prisma.organization.deleteMany({
|
||||
await prisma.project.deleteMany({
|
||||
where: { id: defaultId },
|
||||
});
|
||||
|
||||
// If there's an existing org, just seed into it
|
||||
const org =
|
||||
(await prisma.organization.findFirst({})) ??
|
||||
(await prisma.organization.create({
|
||||
// If there's an existing project, just seed into it
|
||||
const project =
|
||||
(await prisma.project.findFirst({})) ??
|
||||
(await prisma.project.create({
|
||||
data: { id: defaultId },
|
||||
}));
|
||||
|
||||
@@ -26,7 +27,7 @@ const experimentName = `Twitter Sentiment Analysis`;
|
||||
const oldExperiment = await prisma.experiment.findFirst({
|
||||
where: {
|
||||
label: experimentName,
|
||||
organizationId: org.id,
|
||||
projectId: project.id,
|
||||
},
|
||||
});
|
||||
if (oldExperiment) {
|
||||
@@ -39,7 +40,7 @@ const experiment = await prisma.experiment.create({
|
||||
data: {
|
||||
id: oldExperiment?.id ?? undefined,
|
||||
label: experimentName,
|
||||
organizationId: org.id,
|
||||
projectId: project.id,
|
||||
},
|
||||
});
|
||||
|
||||
@@ -85,8 +86,8 @@ await prisma.promptVariant.createMany({
|
||||
sortIndex: 0,
|
||||
model: "gpt-3.5-turbo-0613",
|
||||
modelProvider: "openai/ChatCompletion",
|
||||
constructFnVersion: 1,
|
||||
constructFn: dedent`
|
||||
promptConstructorVersion,
|
||||
promptConstructor: dedent`
|
||||
definePrompt("openai/ChatCompletion", {
|
||||
model: "gpt-3.5-turbo-0613",
|
||||
messages: [
|
||||
BIN
app/public/favicon.ico
Normal file
|
After Width: | Height: | Size: 15 KiB |
BIN
app/public/favicons/android-chrome-192x192.png
Normal file
|
After Width: | Height: | Size: 6.1 KiB |
BIN
app/public/favicons/android-chrome-512x512.png
Normal file
|
After Width: | Height: | Size: 49 KiB |
BIN
app/public/favicons/apple-touch-icon.png
Normal file
|
After Width: | Height: | Size: 5.3 KiB |
BIN
app/public/favicons/favicon-16x16.png
Normal file
|
After Width: | Height: | Size: 800 B |
BIN
app/public/favicons/favicon-32x32.png
Normal file
|
After Width: | Height: | Size: 1.3 KiB |
BIN
app/public/favicons/favicon.ico
Normal file
|
After Width: | Height: | Size: 15 KiB |
BIN
app/public/favicons/mstile-150x150.png
Normal file
|
After Width: | Height: | Size: 3.4 KiB |
@@ -9,10 +9,9 @@ Created by potrace 1.14, written by Peter Selinger 2001-2017
|
||||
</metadata>
|
||||
<g transform="translate(0.000000,550.000000) scale(0.100000,-0.100000)"
|
||||
fill="#000000" stroke="none">
|
||||
<path d="M813 5478 c-18 -13 -37 -36 -43 -52 -6 -19 -10 -236 -10 -603 0 -638
|
||||
-1 -626 65 -657 25 -12 67 -16 179 -16 l146 0 0 -2032 0 -2032 23 -33 c12 -18
|
||||
35 -37 51 -43 19 -7 539 -10 1528 -10 1663 0 1549 -5 1582 65 14 30 16 235 16
|
||||
2059 l0 2026 156 0 156 0 39 39 39 39 0 587 c0 651 1 638 -65 669 -30 14 -223
|
||||
16 -1932 16 l-1898 0 -32 -22z"/>
|
||||
<path d="M785 5474 l-25 -27 0 -622 0 -622 25 -27 24 -26 171 0 170 0 0 -2050
|
||||
0 -2051 25 -25 24 -24 1557 2 1556 3 19 24 c19 23 19 70 19 2072 l0 2049 169
|
||||
0 c165 0 169 1 195 25 l26 24 0 626 0 626 -26 24 -27 25 -1939 0 -1939 0 -24
|
||||
-26z"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 858 B After Width: | Height: | Size: 755 B |
28
app/public/logo.svg
Normal file
@@ -0,0 +1,28 @@
|
||||
<svg width="398" height="550" viewBox="0 0 398 550" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M39 125H359V542C359 546.418 355.418 550 351 550H47C42.5817 550 39 546.418 39 542V125Z" fill="black"/>
|
||||
<path d="M0 8C0 3.58172 3.58172 0 8 0H390C394.418 0 398 3.58172 398 8V127C398 131.418 394.418 135 390 135H7.99999C3.58171 135 0 131.418 0 127V8Z" fill="black"/>
|
||||
<path d="M50 135H348V535C348 537.209 346.209 539 344 539H54C51.7909 539 50 537.209 50 535V135Z" fill="#FF5733"/>
|
||||
<path d="M11 14.0001C11 11.791 12.7909 10.0001 15 10.0001H384C386.209 10.0001 388 11.791 388 14.0001V120C388 122.209 386.209 124 384 124H15C12.7909 124 11 122.209 11 120V14.0001Z" fill="#FF5733"/>
|
||||
<path d="M11 14.0001C11 11.791 12.7909 10.0001 15 10.0001H384C386.209 10.0001 388 11.791 388 14.0001V120C388 122.209 386.209 124 384 124H15C12.7909 124 11 122.209 11 120V14.0001Z" fill="url(#paint0_linear_102_49)"/>
|
||||
<path d="M50 134H348V535C348 537.209 346.209 539 344 539H54C51.7909 539 50 537.209 50 535V134Z" fill="url(#paint1_linear_102_49)"/>
|
||||
<path d="M108 142H156V535H108V142Z" fill="white"/>
|
||||
<path d="M300 135H348V535C348 537.209 346.209 539 344 539H300V135Z" fill="white" fill-opacity="0.25"/>
|
||||
<path d="M96 142H108V535H96V142Z" fill="white" fill-opacity="0.5"/>
|
||||
<path d="M84 10.0001H133V120H84V10.0001Z" fill="white"/>
|
||||
<path d="M339 10.0001H384C386.209 10.0001 388 11.791 388 14.0001V120C388 122.209 386.209 124 384 124H339V10.0001Z" fill="white" fill-opacity="0.25"/>
|
||||
<path d="M71.9995 10.0001H83.9995V120H71.9995V10.0001Z" fill="white" fill-opacity="0.5"/>
|
||||
<path d="M108 534.529H156V539.019H108V534.529Z" fill="#AAAAAA"/>
|
||||
<path opacity="0.5" d="M95.9927 534.529H107.982V539.019H95.9927V534.529Z" fill="#AAAAAA"/>
|
||||
<path d="M84.0029 119.887H133.007V124.027H84.0029V119.887Z" fill="#AAAAAA"/>
|
||||
<path opacity="0.5" d="M71.9883 119.887H83.978V124.027H71.9883V119.887Z" fill="#AAAAAA"/>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear_102_49" x1="335" y1="67.0001" x2="137" y2="67.0001" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#D62600"/>
|
||||
<stop offset="1" stop-color="#FF5733" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint1_linear_102_49" x1="306.106" y1="336.5" x2="149.597" y2="336.5" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#D62600"/>
|
||||
<stop offset="1" stop-color="#FF5733" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.3 KiB |
BIN
app/public/og.png
Normal file
|
After Width: | Height: | Size: 26 KiB |
6
app/scripts/debug-prod.sh
Normal file
@@ -0,0 +1,6 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e
|
||||
cd "$(dirname "$0")/.."
|
||||
apt-get update
|
||||
apt-get install -y htop postgresql-client
|
||||
@@ -5,8 +5,9 @@ set -e
|
||||
echo "Migrating the database"
|
||||
pnpm prisma migrate deploy
|
||||
|
||||
echo "Migrating promptConstructors"
|
||||
pnpm tsx src/promptConstructor/migrate.ts
|
||||
|
||||
echo "Starting the server"
|
||||
|
||||
pnpm concurrently --kill-others \
|
||||
"pnpm start" \
|
||||
"pnpm tsx src/server/tasks/worker.ts"
|
||||
pnpm start
|
||||
10
app/scripts/run-workers-prod.sh
Executable file
@@ -0,0 +1,10 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
echo "Migrating the database"
|
||||
pnpm prisma migrate deploy
|
||||
|
||||
echo "Starting 4 workers"
|
||||
|
||||
pnpm concurrently "pnpm worker" "pnpm worker" "pnpm worker" "pnpm worker"
|
||||
13
app/scripts/test-docker.sh
Executable file
@@ -0,0 +1,13 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
cd "$(dirname "$0")/../.."
|
||||
|
||||
echo "Env is"
|
||||
echo $ENVIRONMENT
|
||||
|
||||
docker build . --file app/Dockerfile --tag "openpipe-prod"
|
||||
|
||||
# Run the image
|
||||
docker run --env-file app/.env -it --entrypoint "/bin/bash" "openpipe-prod"
|
||||
33
app/sentry.client.config.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
// This file configures the initialization of Sentry on the client.
|
||||
// The config you add here will be used whenever a user loads a page in their browser.
|
||||
// https://docs.sentry.io/platforms/javascript/guides/nextjs/
|
||||
|
||||
import * as Sentry from "@sentry/nextjs";
|
||||
import { env } from "~/env.mjs";
|
||||
|
||||
if (env.NEXT_PUBLIC_SENTRY_DSN) {
|
||||
Sentry.init({
|
||||
dsn: env.NEXT_PUBLIC_SENTRY_DSN,
|
||||
|
||||
// Adjust this value in production, or use tracesSampler for greater control
|
||||
tracesSampleRate: 1,
|
||||
|
||||
// Setting this option to true will print useful information to the console while you're setting up Sentry.
|
||||
debug: false,
|
||||
|
||||
replaysOnErrorSampleRate: 1.0,
|
||||
|
||||
// This sets the sample rate to be 10%. You may want this to be 100% while
|
||||
// in development and sample at a lower rate in production
|
||||
replaysSessionSampleRate: 0.1,
|
||||
|
||||
// You can remove this option if you're not planning to use the Sentry Session Replay feature:
|
||||
integrations: [
|
||||
new Sentry.Replay({
|
||||
// Additional Replay configuration goes in here, for example:
|
||||
maskAllText: true,
|
||||
blockAllMedia: true,
|
||||
}),
|
||||
],
|
||||
});
|
||||
}
|
||||
19
app/sentry.edge.config.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
// This file configures the initialization of Sentry for edge features (middleware, edge routes, and so on).
|
||||
// The config you add here will be used whenever one of the edge features is loaded.
|
||||
// Note that this config is unrelated to the Vercel Edge Runtime and is also required when running locally.
|
||||
// https://docs.sentry.io/platforms/javascript/guides/nextjs/
|
||||
|
||||
import * as Sentry from "@sentry/nextjs";
|
||||
import { env } from "~/env.mjs";
|
||||
|
||||
if (env.NEXT_PUBLIC_SENTRY_DSN) {
|
||||
Sentry.init({
|
||||
dsn: env.NEXT_PUBLIC_SENTRY_DSN,
|
||||
|
||||
// Adjust this value in production, or use tracesSampler for greater control
|
||||
tracesSampleRate: 1,
|
||||
|
||||
// Setting this option to true will print useful information to the console while you're setting up Sentry.
|
||||
debug: false,
|
||||
});
|
||||
}
|
||||
25
app/sentry.server.config.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
// This file configures the initialization of Sentry on the server.
|
||||
// The config you add here will be used whenever the server handles a request.
|
||||
// https://docs.sentry.io/platforms/javascript/guides/nextjs/
|
||||
|
||||
import * as Sentry from "@sentry/nextjs";
|
||||
import { isError } from "lodash-es";
|
||||
import { env } from "~/env.mjs";
|
||||
|
||||
if (env.NEXT_PUBLIC_SENTRY_DSN) {
|
||||
Sentry.init({
|
||||
dsn: env.NEXT_PUBLIC_SENTRY_DSN,
|
||||
|
||||
// Adjust this value in production, or use tracesSampler for greater control
|
||||
tracesSampleRate: 1,
|
||||
|
||||
// Setting this option to true will print useful information to the console while you're setting up Sentry.
|
||||
debug: false,
|
||||
});
|
||||
} else {
|
||||
// Install local debug exception handler for rejected promises
|
||||
process.on("unhandledRejection", (reason) => {
|
||||
const reasonDetails = isError(reason) ? reason?.stack : reason;
|
||||
console.log("Unhandled Rejection at:", reasonDetails);
|
||||
});
|
||||
}
|
||||
55
app/src/components/ActionButton.tsx
Normal file
@@ -0,0 +1,55 @@
|
||||
import { useState } from "react";
|
||||
|
||||
import { Button, HStack, type ButtonProps, Icon, Text } from "@chakra-ui/react";
|
||||
import { type IconType } from "react-icons";
|
||||
import { useAppStore } from "~/state/store";
|
||||
import { BetaModal } from "./BetaModal";
|
||||
|
||||
const ActionButton = ({
|
||||
icon,
|
||||
iconBoxSize = 3.5,
|
||||
label,
|
||||
requireBeta = false,
|
||||
onClick,
|
||||
...buttonProps
|
||||
}: {
|
||||
icon: IconType;
|
||||
iconBoxSize?: number;
|
||||
label: string;
|
||||
requireBeta?: boolean;
|
||||
onClick?: () => void;
|
||||
} & ButtonProps) => {
|
||||
const flags = useAppStore((s) => s.featureFlags.featureFlags);
|
||||
const flagsLoaded = useAppStore((s) => s.featureFlags.flagsLoaded);
|
||||
|
||||
const [betaModalOpen, setBetaModalOpen] = useState(false);
|
||||
|
||||
const isBetaBlocked = requireBeta && flagsLoaded && !flags.betaAccess;
|
||||
return (
|
||||
<>
|
||||
<Button
|
||||
colorScheme="blue"
|
||||
color="black"
|
||||
bgColor="white"
|
||||
borderColor="gray.300"
|
||||
borderRadius={4}
|
||||
variant="outline"
|
||||
size="sm"
|
||||
fontSize="sm"
|
||||
fontWeight="normal"
|
||||
onClick={isBetaBlocked ? () => setBetaModalOpen(true) : onClick}
|
||||
{...buttonProps}
|
||||
>
|
||||
<HStack spacing={1}>
|
||||
{icon && (
|
||||
<Icon as={icon} boxSize={iconBoxSize} color={requireBeta ? "orange.400" : undefined} />
|
||||
)}
|
||||
<Text display={{ base: "none", md: "flex" }}>{label}</Text>
|
||||
</HStack>
|
||||
</Button>
|
||||
<BetaModal isOpen={betaModalOpen} onClose={() => setBetaModalOpen(false)} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default ActionButton;
|
||||
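`ActionButton` wraps a Chakra `Button` and, when `requireBeta` is set and the `betaAccess` feature flag is absent, swaps the click handler for the `BetaModal`. A hedged usage sketch — the import path assumes the project's `~` alias for `app/src`, and the label, icon, and handler are placeholders:

```tsx
import { BsPlus } from "react-icons/bs";
import ActionButton from "~/components/ActionButton";

// Illustrative only: clicking opens the beta waitlist modal unless the
// betaAccess flag is enabled for the current user.
const AddVariantAction = () => (
  <ActionButton
    icon={BsPlus}
    label="Add Variant"
    requireBeta
    onClick={() => console.log("add variant clicked")}
  />
);

export default AddVariantAction;
```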
@@ -1,13 +1,13 @@
|
||||
import { Textarea, type TextareaProps } from "@chakra-ui/react";
|
||||
import ResizeTextarea from "react-textarea-autosize";
|
||||
import React, { useLayoutEffect, useState } from "react";
|
||||
import React, { useEffect, useState } from "react";
|
||||
|
||||
export const AutoResizeTextarea: React.ForwardRefRenderFunction<
|
||||
HTMLTextAreaElement,
|
||||
TextareaProps & { minRows?: number }
|
||||
> = ({ minRows = 1, overflowY = "hidden", ...props }, ref) => {
|
||||
const [isRerendered, setIsRerendered] = useState(false);
|
||||
useLayoutEffect(() => setIsRerendered(true), []);
|
||||
useEffect(() => setIsRerendered(true), []);
|
||||
|
||||
return (
|
||||
<Textarea
|
||||
65
app/src/components/BetaModal.tsx
Normal file
@@ -0,0 +1,65 @@
|
||||
import {
|
||||
Button,
|
||||
Modal,
|
||||
ModalBody,
|
||||
ModalContent,
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
ModalOverlay,
|
||||
VStack,
|
||||
Text,
|
||||
HStack,
|
||||
Icon,
|
||||
Link,
|
||||
} from "@chakra-ui/react";
|
||||
import { BsStars } from "react-icons/bs";
|
||||
import { useSession } from "next-auth/react";
|
||||
|
||||
export const BetaModal = ({ isOpen, onClose }: { isOpen: boolean; onClose: () => void }) => {
|
||||
const session = useSession();
|
||||
|
||||
const email = session.data?.user.email ?? "";
|
||||
|
||||
return (
|
||||
<Modal
|
||||
isOpen={isOpen}
|
||||
onClose={onClose}
|
||||
closeOnOverlayClick={false}
|
||||
size={{ base: "xl", md: "2xl" }}
|
||||
>
|
||||
<ModalOverlay />
|
||||
<ModalContent w={1200}>
|
||||
<ModalHeader>
|
||||
<HStack>
|
||||
<Icon as={BsStars} />
|
||||
<Text>Beta-Only Feature</Text>
|
||||
</HStack>
|
||||
</ModalHeader>
|
||||
<ModalBody maxW="unset">
|
||||
<VStack spacing={8} py={4} alignItems="flex-start">
|
||||
<Text fontSize="md">
|
||||
This feature is currently in beta. To receive early access to beta-only features, join
|
||||
the waitlist. You'll receive an email at <b>{email}</b> when you're approved.
|
||||
</Text>
|
||||
</VStack>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<HStack spacing={4}>
|
||||
<Button
|
||||
as={Link}
|
||||
textDecoration="none !important"
|
||||
colorScheme="orange"
|
||||
target="_blank"
|
||||
href={`https://ax3nafkw0jp.typeform.com/to/ZNpYqvAc#email=${email}`}
|
||||
>
|
||||
Join Waitlist
|
||||
</Button>
|
||||
<Button colorScheme="blue" onClick={onClose}>
|
||||
Done
|
||||
</Button>
|
||||
</HStack>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
@@ -1,3 +1,4 @@
|
||||
import { useState, useMemo, useCallback } from "react";
|
||||
import {
|
||||
Button,
|
||||
HStack,
|
||||
@@ -14,16 +15,18 @@ import {
|
||||
VStack,
|
||||
} from "@chakra-ui/react";
|
||||
import { type PromptVariant } from "@prisma/client";
|
||||
import { isObject, isString } from "lodash-es";
|
||||
import { useState } from "react";
|
||||
import { isString } from "lodash-es";
|
||||
import { RiExchangeFundsFill } from "react-icons/ri";
|
||||
|
||||
import { type ProviderModel } from "~/modelProviders/types";
|
||||
import { api } from "~/utils/api";
|
||||
import { useExperiment, useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
|
||||
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import { lookupModel, modelLabel } from "~/utils/utils";
|
||||
import CompareFunctions from "../RefinePromptModal/CompareFunctions";
|
||||
import { ModelSearch } from "./ModelSearch";
|
||||
import { ModelStatsCard } from "./ModelStatsCard";
|
||||
import { maybeReportError } from "~/utils/errorHandling/maybeReportError";
|
||||
import { useAppStore } from "~/state/store";
|
||||
|
||||
export const ChangeModelModal = ({
|
||||
variant,
|
||||
@@ -32,48 +35,43 @@ export const ChangeModelModal = ({
|
||||
variant: PromptVariant;
|
||||
onClose: () => void;
|
||||
}) => {
|
||||
const editorOptionsMap = useAppStore((s) => s.sharedVariantEditor.editorOptionsMap);
|
||||
const originalPromptFn = useMemo(
|
||||
() => editorOptionsMap[variant.uiId]?.getContent() || "",
|
||||
[editorOptionsMap, variant.uiId],
|
||||
);
|
||||
|
||||
const originalModel = lookupModel(variant.modelProvider, variant.model);
|
||||
const [selectedModel, setSelectedModel] = useState({
|
||||
provider: variant.modelProvider,
|
||||
model: variant.model,
|
||||
} as ProviderModel);
|
||||
const [convertedModel, setConvertedModel] = useState<ProviderModel | undefined>();
|
||||
const visibleScenarios = useVisibleScenarioIds();
|
||||
|
||||
const utils = api.useContext();
|
||||
const [modifiedPromptFn, setModifiedPromptFn] = useState<string>();
|
||||
|
||||
const experiment = useExperiment();
|
||||
|
||||
const { mutateAsync: getModifiedPromptMutateAsync, data: modifiedPromptFn } =
|
||||
const { mutateAsync: getModifiedPromptMutateAsync } =
|
||||
api.promptVariants.getModifiedPromptFn.useMutation();
|
||||
|
||||
const [getModifiedPromptFn, modificationInProgress] = useHandledAsyncCallback(async () => {
|
||||
if (!experiment) return;
|
||||
|
||||
await getModifiedPromptMutateAsync({
|
||||
const resp = await getModifiedPromptMutateAsync({
|
||||
id: variant.id,
|
||||
originalPromptFn,
|
||||
newModel: selectedModel,
|
||||
});
|
||||
if (maybeReportError(resp)) return;
|
||||
setModifiedPromptFn(resp.payload);
|
||||
setConvertedModel(selectedModel);
|
||||
}, [getModifiedPromptMutateAsync, onClose, experiment, variant, selectedModel]);
|
||||
|
||||
const replaceVariantMutation = api.promptVariants.replaceVariant.useMutation();
|
||||
|
||||
const [replaceVariant, replacementInProgress] = useHandledAsyncCallback(async () => {
|
||||
if (
|
||||
!variant.experimentId ||
|
||||
!modifiedPromptFn ||
|
||||
(isObject(modifiedPromptFn) && "status" in modifiedPromptFn)
|
||||
)
|
||||
return;
|
||||
await replaceVariantMutation.mutateAsync({
|
||||
id: variant.id,
|
||||
constructFn: modifiedPromptFn,
|
||||
streamScenarios: visibleScenarios,
|
||||
});
|
||||
await utils.promptVariants.list.invalidate();
|
||||
const replaceVariant = useCallback(() => {
|
||||
if (!modifiedPromptFn) return;
|
||||
editorOptionsMap[variant.uiId]?.setContent(modifiedPromptFn);
|
||||
onClose();
|
||||
}, [replaceVariantMutation, variant, onClose, modifiedPromptFn]);
|
||||
}, [variant.uiId, editorOptionsMap, onClose, modifiedPromptFn]);
|
||||
|
||||
const originalLabel = modelLabel(variant.modelProvider, variant.model);
|
||||
const selectedLabel = modelLabel(selectedModel.provider, selectedModel.model);
|
||||
@@ -107,7 +105,7 @@ export const ChangeModelModal = ({
|
||||
<ModelSearch selectedModel={selectedModel} setSelectedModel={setSelectedModel} />
|
||||
{isString(modifiedPromptFn) && (
|
||||
<CompareFunctions
|
||||
originalFunction={variant.constructFn}
|
||||
originalFunction={variant.promptConstructor}
|
||||
newFunction={modifiedPromptFn}
|
||||
leftTitle={originalLabel}
|
||||
rightTitle={convertedLabel}
|
||||
@@ -130,9 +128,9 @@ export const ChangeModelModal = ({
|
||||
colorScheme="blue"
|
||||
onClick={replaceVariant}
|
||||
minW={24}
|
||||
isDisabled={!convertedModel || modificationInProgress || replacementInProgress}
|
||||
isDisabled={!convertedModel || modificationInProgress}
|
||||
>
|
||||
{replacementInProgress ? <Spinner boxSize={4} /> : <Text>Accept</Text>}
|
||||
Accept
|
||||
</Button>
|
||||
</HStack>
|
||||
</ModalFooter>
|
||||
@@ -22,8 +22,8 @@ export const ModelSearch = (props: {
|
||||
const [containerRef, containerDimensions] = useElementDimensions();
|
||||
|
||||
return (
|
||||
<VStack ref={containerRef as LegacyRef<HTMLDivElement>} w="full">
|
||||
<Text>Browse Models</Text>
|
||||
<VStack ref={containerRef as LegacyRef<HTMLDivElement>} w="full" fontFamily="inconsolata">
|
||||
<Text fontWeight="bold">Browse Models</Text>
|
||||
<Select<ProviderModel>
|
||||
styles={{ control: (provided) => ({ ...provided, width: containerDimensions?.width }) }}
|
||||
getOptionLabel={(data) => modelLabel(data.provider, data.model)}
|
||||
@@ -23,16 +23,24 @@ export const ModelStatsCard = ({
|
||||
{label}
|
||||
</Text>
|
||||
|
||||
<VStack w="full" spacing={6} bgColor="gray.100" p={4} borderRadius={4}>
|
||||
<VStack
|
||||
w="full"
|
||||
spacing={6}
|
||||
borderWidth={1}
|
||||
borderColor="gray.300"
|
||||
p={4}
|
||||
borderRadius={8}
|
||||
fontFamily="inconsolata"
|
||||
>
|
||||
<HStack w="full" align="flex-start">
|
||||
<Text flex={1} fontSize="lg">
|
||||
<Text as="span" color="gray.600">
|
||||
{model.provider} /{" "}
|
||||
</Text>
|
||||
<VStack flex={1} fontSize="lg" alignItems="flex-start">
|
||||
<Text as="span" fontWeight="bold" color="gray.900">
|
||||
{model.name}
|
||||
</Text>
|
||||
</Text>
|
||||
<Text as="span" color="gray.600" fontSize="sm">
|
||||
Provider: {model.provider}
|
||||
</Text>
|
||||
</VStack>
|
||||
<Link
|
||||
href={model.learnMoreUrl}
|
||||
isExternal
|
||||
@@ -79,7 +87,7 @@ export const ModelStatsCard = ({
|
||||
label="Price"
|
||||
info={
|
||||
<Text>
|
||||
${model.pricePerSecond.toFixed(3)}
|
||||
${model.pricePerSecond.toFixed(4)}
|
||||
<Text color="gray.500"> / second</Text>
|
||||
</Text>
|
||||
}
|
||||
51
app/src/components/CopiableCode.tsx
Normal file
@@ -0,0 +1,51 @@
|
||||
import { HStack, Icon, IconButton, Tooltip, Text, type StackProps } from "@chakra-ui/react";
|
||||
import { useState } from "react";
|
||||
import { MdContentCopy } from "react-icons/md";
|
||||
import { useHandledAsyncCallback } from "~/utils/hooks";
|
||||
|
||||
const CopiableCode = ({ code, ...rest }: { code: string } & StackProps) => {
|
||||
const [copied, setCopied] = useState(false);
|
||||
|
||||
const [copyToClipboard] = useHandledAsyncCallback(async () => {
|
||||
await navigator.clipboard.writeText(code);
|
||||
setCopied(true);
|
||||
}, [code]);
|
||||
|
||||
return (
|
||||
<HStack
|
||||
backgroundColor="blackAlpha.800"
|
||||
color="white"
|
||||
borderRadius={4}
|
||||
padding={3}
|
||||
w="full"
|
||||
justifyContent="space-between"
|
||||
alignItems="flex-start"
|
||||
{...rest}
|
||||
>
|
||||
<Text
|
||||
fontFamily="inconsolata"
|
||||
fontWeight="bold"
|
||||
letterSpacing={0.5}
|
||||
overflowX="auto"
|
||||
whiteSpace="pre-wrap"
|
||||
>
|
||||
{code}
|
||||
{/* Necessary for trailing newline to actually be displayed */}
|
||||
{code.endsWith("\n") ? "\n" : ""}
|
||||
</Text>
|
||||
<Tooltip closeOnClick={false} label={copied ? "Copied!" : "Copy to clipboard"}>
|
||||
<IconButton
|
||||
aria-label="Copy"
|
||||
icon={<Icon as={MdContentCopy} boxSize={5} />}
|
||||
size="xs"
|
||||
colorScheme="white"
|
||||
variant="ghost"
|
||||
onClick={copyToClipboard}
|
||||
onMouseLeave={() => setCopied(false)}
|
||||
/>
|
||||
</Tooltip>
|
||||
</HStack>
|
||||
);
|
||||
};
|
||||
|
||||
export default CopiableCode;
|
||||
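`CopiableCode` renders a dark block with a copy-to-clipboard button; `code` is the only required prop and everything else is forwarded as `StackProps`. A small usage sketch (import path and the command string are illustrative):

```tsx
import CopiableCode from "~/components/CopiableCode";

// Extra props (maxW here) are spread onto the surrounding HStack.
const InstallSnippet = () => (
  <CopiableCode code={"curl -s https://example.com/install.sh\n"} maxW={600} />
);

export default InstallSnippet;
```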
@@ -1,18 +1,29 @@
|
||||
import { Button, Spinner, InputGroup, InputRightElement, Icon, HStack } from "@chakra-ui/react";
|
||||
import {
|
||||
Button,
|
||||
Spinner,
|
||||
InputGroup,
|
||||
InputRightElement,
|
||||
Icon,
|
||||
HStack,
|
||||
type InputGroupProps,
|
||||
} from "@chakra-ui/react";
|
||||
import { IoMdSend } from "react-icons/io";
|
||||
import AutoResizeTextArea from "../AutoResizeTextArea";
|
||||
import AutoResizeTextArea from "./AutoResizeTextArea";
|
||||
|
||||
export const CustomInstructionsInput = ({
|
||||
instructions,
|
||||
setInstructions,
|
||||
loading,
|
||||
onSubmit,
|
||||
placeholder = "Send custom instructions",
|
||||
...props
|
||||
}: {
|
||||
instructions: string;
|
||||
setInstructions: (instructions: string) => void;
|
||||
loading: boolean;
|
||||
onSubmit: () => void;
|
||||
}) => {
|
||||
placeholder?: string;
|
||||
} & InputGroupProps) => {
|
||||
return (
|
||||
<InputGroup
|
||||
size="md"
|
||||
@@ -22,6 +33,7 @@ export const CustomInstructionsInput = ({
|
||||
borderRadius={8}
|
||||
alignItems="center"
|
||||
colorScheme="orange"
|
||||
{...props}
|
||||
>
|
||||
<AutoResizeTextArea
|
||||
value={instructions}
|
||||
@@ -33,7 +45,7 @@ export const CustomInstructionsInput = ({
|
||||
onSubmit();
|
||||
}
|
||||
}}
|
||||
placeholder="Send custom instructions"
|
||||
placeholder={placeholder}
|
||||
py={4}
|
||||
pl={4}
|
||||
pr={12}
|
||||
@@ -8,8 +8,8 @@ export default function Favicon() {
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="/favicons/favicon-16x16.png" />
|
||||
<link rel="manifest" href="/favicons/site.webmanifest" />
|
||||
<link rel="shortcut icon" href="/favicons/favicon.ico" />
|
||||
<link rel="mask-icon" href="/favicons/safari-pinned-tab.svg" color="#5bbad5" />
|
||||
<meta name="msapplication-TileColor" content="#da532c" />
|
||||
<meta name="msapplication-config" content="/favicons/browserconfig.xml" />
|
||||
<meta name="theme-color" content="#ffffff" />
|
||||
</Head>
|
||||
);
|
||||
55
app/src/components/FormattedJson.tsx
Normal file
@@ -0,0 +1,55 @@
|
||||
import { Box, IconButton, useToast } from "@chakra-ui/react";
|
||||
import { CopyIcon } from "lucide-react";
|
||||
import SyntaxHighlighter from "react-syntax-highlighter";
|
||||
import { atelierCaveLight } from "react-syntax-highlighter/dist/cjs/styles/hljs";
|
||||
import stringify from "json-stringify-pretty-compact";
|
||||
|
||||
const FormattedJson = ({ json }: { json: any }) => {
|
||||
const jsonString = stringify(json, { maxLength: 40 });
|
||||
const toast = useToast();
|
||||
|
||||
const copyToClipboard = async (text: string) => {
|
||||
try {
|
||||
await navigator.clipboard.writeText(text);
|
||||
toast({
|
||||
title: "Copied to clipboard",
|
||||
status: "success",
|
||||
duration: 2000,
|
||||
});
|
||||
} catch (err) {
|
||||
toast({
|
||||
title: "Failed to copy to clipboard",
|
||||
status: "error",
|
||||
duration: 2000,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<Box position="relative" fontSize="sm" borderRadius="md" overflow="hidden">
|
||||
<SyntaxHighlighter
|
||||
customStyle={{ overflowX: "unset" }}
|
||||
language="json"
|
||||
style={atelierCaveLight}
|
||||
lineProps={{
|
||||
style: { wordBreak: "break-all", whiteSpace: "pre-wrap" },
|
||||
}}
|
||||
wrapLines
|
||||
>
|
||||
{jsonString}
|
||||
</SyntaxHighlighter>
|
||||
<IconButton
|
||||
aria-label="Copy"
|
||||
icon={<CopyIcon />}
|
||||
position="absolute"
|
||||
top={1}
|
||||
right={1}
|
||||
size="xs"
|
||||
variant="ghost"
|
||||
onClick={() => void copyToClipboard(jsonString)}
|
||||
/>
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
|
||||
export { FormattedJson };
|
||||
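`FormattedJson` pretty-prints any value with `json-stringify-pretty-compact` (`maxLength: 40`), syntax-highlights it, and overlays a copy button. A usage sketch (the payload shown is a placeholder):

```tsx
import { FormattedJson } from "~/components/FormattedJson";

// Any JSON-serializable value works; lines wrap at roughly 40 characters.
const ExamplePayload = () => (
  <FormattedJson
    json={{ model: "gpt-3.5-turbo-0613", messages: [{ role: "user", content: "Hi" }] }}
  />
);

export default ExamplePayload;
```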
14
app/src/components/InfoCircle.tsx
Normal file
@@ -0,0 +1,14 @@
|
||||
import { Tooltip, Icon, VStack } from "@chakra-ui/react";
|
||||
import { RiInformationFill } from "react-icons/ri";
|
||||
|
||||
const InfoCircle = ({ tooltipText }: { tooltipText: string }) => {
|
||||
return (
|
||||
<Tooltip label={tooltipText} fontSize="sm" shouldWrapChildren maxW={80}>
|
||||
<VStack>
|
||||
<Icon as={RiInformationFill} boxSize={5} color="gray.500" />
|
||||
</VStack>
|
||||
</Tooltip>
|
||||
);
|
||||
};
|
||||
|
||||
export default InfoCircle;
|
||||
100
app/src/components/InputDropdown.tsx
Normal file
@@ -0,0 +1,100 @@
|
||||
import {
|
||||
Input,
|
||||
InputGroup,
|
||||
InputRightElement,
|
||||
Icon,
|
||||
Popover,
|
||||
PopoverTrigger,
|
||||
PopoverContent,
|
||||
VStack,
|
||||
HStack,
|
||||
Button,
|
||||
Text,
|
||||
useDisclosure,
|
||||
type InputGroupProps,
|
||||
} from "@chakra-ui/react";
|
||||
|
||||
import { FiChevronDown } from "react-icons/fi";
|
||||
import { BiCheck } from "react-icons/bi";
|
||||
import { isEqual } from "lodash-es";
|
||||
import React from "react";
|
||||
|
||||
type InputDropdownProps<T> = {
|
||||
options: ReadonlyArray<T>;
|
||||
selectedOption: T;
|
||||
onSelect: (option: T) => void;
|
||||
inputGroupProps?: InputGroupProps;
|
||||
getDisplayLabel?: (option: T) => string;
|
||||
isDisabled?: boolean;
|
||||
};
|
||||
|
||||
const InputDropdown = <T,>({
|
||||
options,
|
||||
selectedOption,
|
||||
onSelect,
|
||||
inputGroupProps,
|
||||
getDisplayLabel = (option) => option as string,
|
||||
isDisabled,
|
||||
}: InputDropdownProps<T>) => {
|
||||
const { onOpen, ...popover } = useDisclosure();
|
||||
|
||||
return (
|
||||
<Popover placement="bottom-start" onOpen={isDisabled ? undefined : onOpen} {...popover}>
|
||||
<PopoverTrigger>
|
||||
<InputGroup
|
||||
cursor="pointer"
|
||||
w={getDisplayLabel(selectedOption).length * 14 + 180}
|
||||
{...inputGroupProps}
|
||||
>
|
||||
<Input
|
||||
value={getDisplayLabel(selectedOption)}
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function -- controlled input requires onChange
|
||||
onChange={() => {}}
|
||||
cursor="pointer"
|
||||
borderColor={popover.isOpen ? "blue.500" : undefined}
|
||||
_hover={popover.isOpen ? { borderColor: "blue.500" } : undefined}
|
||||
contentEditable={false}
|
||||
// disable focus
|
||||
onFocus={(e) => {
|
||||
e.target.blur();
|
||||
}}
|
||||
isDisabled={isDisabled}
|
||||
/>
|
||||
<InputRightElement>
|
||||
<Icon as={FiChevronDown} color={isDisabled ? "gray.300" : undefined} />
|
||||
</InputRightElement>
|
||||
</InputGroup>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);" minW={0} w="auto">
|
||||
<VStack spacing={0}>
|
||||
{options?.map((option, index) => (
|
||||
<HStack
|
||||
key={index}
|
||||
as={Button}
|
||||
onClick={() => {
|
||||
onSelect(option);
|
||||
popover.onClose();
|
||||
}}
|
||||
w="full"
|
||||
variant="ghost"
|
||||
justifyContent="space-between"
|
||||
fontWeight="semibold"
|
||||
borderRadius={0}
|
||||
colorScheme="blue"
|
||||
color="black"
|
||||
fontSize="sm"
|
||||
borderBottomWidth={1}
|
||||
>
|
||||
<Text mr={16}>{getDisplayLabel(option)}</Text>
|
||||
{isEqual(option, selectedOption) && (
|
||||
<Icon as={BiCheck} color="blue.500" boxSize={5} />
|
||||
)}
|
||||
</HStack>
|
||||
))}
|
||||
</VStack>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
);
|
||||
};
|
||||
|
||||
export default InputDropdown;
|
||||
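`InputDropdown` is a generic controlled select built from a `Popover` plus a read-only `Input`: `options`, `selectedOption`, and `onSelect` drive it, and `getDisplayLabel` is only needed when the options aren't plain strings. A hedged usage sketch — the option values and component name are illustrative:

```tsx
import { useState } from "react";
import InputDropdown from "~/components/InputDropdown";

const ModelPicker = () => {
  const options = ["gpt-3.5-turbo-0613", "gpt-4"] as const;
  const [model, setModel] = useState<(typeof options)[number]>(options[0]);
  // String options need no getDisplayLabel; pass one for object options.
  return <InputDropdown options={options} selectedOption={model} onSelect={setModel} />;
};

export default ModelPicker;
```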
@@ -8,7 +8,7 @@ import {
|
||||
useHandledAsyncCallback,
|
||||
useVisibleScenarioIds,
|
||||
} from "~/utils/hooks";
|
||||
import { cellPadding } from "../constants";
|
||||
import { cellPadding } from "./constants";
|
||||
import { ActionButton } from "./ScenariosHeader";
|
||||
|
||||
export default function AddVariantButton() {
|
||||
@@ -33,25 +33,11 @@ export default function AddVariantButton() {
|
||||
<Flex w="100%" justifyContent="flex-end">
|
||||
<ActionButton
|
||||
onClick={onClick}
|
||||
py={5}
|
||||
py={7}
|
||||
leftIcon={<Icon as={loading ? Spinner : BsPlus} boxSize={6} mr={loading ? 1 : 0} />}
|
||||
>
|
||||
<Text display={{ base: "none", md: "flex" }}>Add Variant</Text>
|
||||
</ActionButton>
|
||||
{/* <Button
|
||||
alignItems="center"
|
||||
justifyContent="center"
|
||||
fontWeight="normal"
|
||||
bgColor="transparent"
|
||||
_hover={{ bgColor: "gray.100" }}
|
||||
px={cellPadding.x}
|
||||
onClick={onClick}
|
||||
height="unset"
|
||||
minH={headerMinHeight}
|
||||
>
|
||||
<Icon as={loading ? Spinner : BsPlus} boxSize={6} mr={loading ? 1 : 0} />
|
||||
<Text display={{ base: "none", md: "flex" }}>Add Variant</Text>
|
||||
</Button> */}
|
||||
</Flex>
|
||||
);
|
||||
}
|
||||
53
app/src/components/OutputsTable/OutputCell/CellOptions.tsx
Normal file
@@ -0,0 +1,53 @@
|
||||
import { HStack, Icon, IconButton, Spinner, Tooltip, useDisclosure } from "@chakra-ui/react";
|
||||
import { BsArrowClockwise, BsInfoCircle } from "react-icons/bs";
|
||||
import { useExperimentAccess } from "~/utils/hooks";
|
||||
import PromptModal from "./PromptModal";
|
||||
import { type RouterOutputs } from "~/utils/api";
|
||||
|
||||
export const CellOptions = ({
|
||||
cell,
|
||||
refetchingOutput,
|
||||
refetchOutput,
|
||||
}: {
|
||||
cell: RouterOutputs["scenarioVariantCells"]["get"];
|
||||
refetchingOutput: boolean;
|
||||
refetchOutput: () => void;
|
||||
}) => {
|
||||
const { canModify } = useExperimentAccess();
|
||||
|
||||
const modalDisclosure = useDisclosure();
|
||||
|
||||
return (
|
||||
<HStack justifyContent="flex-end" w="full" spacing={1}>
|
||||
{cell && (
|
||||
<>
|
||||
<Tooltip label="See Prompt">
|
||||
<IconButton
|
||||
aria-label="See Prompt"
|
||||
icon={<Icon as={BsInfoCircle} boxSize={3.5} />}
|
||||
onClick={modalDisclosure.onOpen}
|
||||
size="xs"
|
||||
colorScheme="gray"
|
||||
color="gray.500"
|
||||
variant="ghost"
|
||||
/>
|
||||
</Tooltip>
|
||||
<PromptModal cell={cell} disclosure={modalDisclosure} />
|
||||
</>
|
||||
)}
|
||||
{canModify && (
|
||||
<Tooltip label="Refetch output">
|
||||
<IconButton
|
||||
size="xs"
|
||||
color="gray.500"
|
||||
variant="ghost"
|
||||
cursor="pointer"
|
||||
onClick={refetchOutput}
|
||||
aria-label="refetch output"
|
||||
icon={<Icon as={refetchingOutput ? Spinner : BsArrowClockwise} boxSize={4} />}
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
</HStack>
|
||||
);
|
||||
};
|
||||
29
app/src/components/OutputsTable/OutputCell/CellWrapper.tsx
Normal file
@@ -0,0 +1,29 @@
|
||||
import { type StackProps, VStack } from "@chakra-ui/react";
|
||||
import { type RouterOutputs } from "~/utils/api";
|
||||
import { type Scenario } from "../types";
|
||||
import { CellOptions } from "./CellOptions";
|
||||
import { OutputStats } from "./OutputStats";
|
||||
|
||||
const CellWrapper: React.FC<
|
||||
StackProps & {
|
||||
cell: RouterOutputs["scenarioVariantCells"]["get"] | undefined;
|
||||
hardRefetching: boolean;
|
||||
hardRefetch: () => void;
|
||||
mostRecentResponse:
|
||||
| NonNullable<RouterOutputs["scenarioVariantCells"]["get"]>["modelResponses"][0]
|
||||
| undefined;
|
||||
scenario: Scenario;
|
||||
}
|
||||
> = ({ children, cell, hardRefetching, hardRefetch, mostRecentResponse, scenario, ...props }) => (
|
||||
<VStack w="full" alignItems="flex-start" {...props} px={2} py={2} h="100%">
|
||||
{cell && (
|
||||
<CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} cell={cell} />
|
||||
)}
|
||||
<VStack w="full" alignItems="flex-start" maxH={500} overflowY="auto" flex={1}>
|
||||
{children}
|
||||
</VStack>
|
||||
{mostRecentResponse && <OutputStats modelResponse={mostRecentResponse} scenario={scenario} />}
|
||||
</VStack>
|
||||
);
|
||||
|
||||
export default CellWrapper;
|
||||
@@ -1,17 +1,16 @@
|
||||
import { api } from "~/utils/api";
|
||||
import { type PromptVariant, type Scenario } from "../types";
|
||||
import { Text, VStack } from "@chakra-ui/react";
|
||||
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import { Text } from "@chakra-ui/react";
|
||||
import stringify from "json-stringify-pretty-compact";
|
||||
import { Fragment, useEffect, useState, type ReactElement } from "react";
|
||||
import SyntaxHighlighter from "react-syntax-highlighter";
|
||||
import { docco } from "react-syntax-highlighter/dist/cjs/styles/hljs";
|
||||
import stringify from "json-stringify-pretty-compact";
|
||||
import { type ReactElement, useState, useEffect, Fragment } from "react";
|
||||
import useSocket from "~/utils/useSocket";
|
||||
import { OutputStats } from "./OutputStats";
|
||||
import { RetryCountdown } from "./RetryCountdown";
|
||||
import frontendModelProviders from "~/modelProviders/frontendModelProviders";
|
||||
import { api } from "~/utils/api";
|
||||
import { useHandledAsyncCallback, useScenarioVars } from "~/utils/hooks";
|
||||
import useSocket from "~/utils/useSocket";
|
||||
import { type PromptVariant, type Scenario } from "../types";
|
||||
import CellWrapper from "./CellWrapper";
|
||||
import { ResponseLog } from "./ResponseLog";
|
||||
import { CellContent } from "./CellContent";
|
||||
import { RetryCountdown } from "./RetryCountdown";
|
||||
|
||||
const WAITING_MESSAGE_INTERVAL = 20000;
|
||||
|
||||
@@ -23,10 +22,7 @@ export default function OutputCell({
|
||||
variant: PromptVariant;
|
||||
}): ReactElement | null {
|
||||
const utils = api.useContext();
|
||||
const experiment = useExperiment();
|
||||
const vars = api.templateVars.list.useQuery({
|
||||
experimentId: experiment.data?.id ?? "",
|
||||
}).data;
|
||||
const vars = useScenarioVars().data;
|
||||
|
||||
const scenarioVariables = scenario.variableValues as Record<string, string>;
|
||||
const templateHasVariables =
|
||||
@@ -36,7 +32,7 @@ export default function OutputCell({
|
||||
|
||||
if (!templateHasVariables) disabledReason = "Add a value to the scenario variables to see output";
|
||||
|
||||
const [refetchInterval, setRefetchInterval] = useState(0);
|
||||
const [refetchInterval, setRefetchInterval] = useState<number | false>(false);
|
||||
const { data: cell, isLoading: queryLoading } = api.scenarioVariantCells.get.useQuery(
|
||||
{ scenarioId: scenario.id, variantId: variant.id },
|
||||
{ refetchInterval },
|
||||
@@ -47,7 +43,7 @@ export default function OutputCell({
|
||||
|
||||
type OutputSchema = Parameters<typeof provider.normalizeOutput>[0];
|
||||
|
||||
const { mutateAsync: hardRefetchMutate } = api.scenarioVariantCells.forceRefetch.useMutation();
|
||||
const { mutateAsync: hardRefetchMutate } = api.scenarioVariantCells.hardRefetch.useMutation();
|
||||
const [hardRefetch, hardRefetching] = useHandledAsyncCallback(async () => {
|
||||
await hardRefetchMutate({ scenarioId: scenario.id, variantId: variant.id });
|
||||
await utils.scenarioVariantCells.get.invalidate({
|
||||
@@ -67,38 +63,47 @@ export default function OutputCell({
     cell.retrievalStatus === "PENDING" ||
     cell.retrievalStatus === "IN_PROGRESS" ||
     hardRefetching;
-  useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : 0), [awaitingOutput]);
+  useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : false), [awaitingOutput]);
 
   // TODO: disconnect from socket if we're not streaming anymore
   const streamedMessage = useSocket<OutputSchema>(cell?.id);
 
+  const mostRecentResponse = cell?.modelResponses[cell.modelResponses.length - 1];
+
+  const wrapperProps: Parameters<typeof CellWrapper>[0] = {
+    cell,
+    hardRefetching,
+    hardRefetch,
+    mostRecentResponse,
+    scenario,
+  };
+
   if (!vars) return null;
 
   if (!cell && !fetchingOutput)
     return (
-      <CellContent hardRefetching={hardRefetching} hardRefetch={hardRefetch}>
+      <CellWrapper {...wrapperProps}>
         <Text color="gray.500">Error retrieving output</Text>
-      </CellContent>
+      </CellWrapper>
     );
 
   if (cell && cell.errorMessage) {
     return (
-      <CellContent hardRefetching={hardRefetching} hardRefetch={hardRefetch}>
+      <CellWrapper {...wrapperProps}>
         <Text color="red.500">{cell.errorMessage}</Text>
-      </CellContent>
+      </CellWrapper>
     );
   }
 
   if (disabledReason) return <Text color="gray.500">{disabledReason}</Text>;
 
-  const mostRecentResponse = cell?.modelResponses[cell.modelResponses.length - 1];
-  const showLogs = !streamedMessage && !mostRecentResponse?.output;
+  const showLogs = !streamedMessage && !mostRecentResponse?.respPayload;
 
   if (showLogs)
     return (
-      <CellContent
-        hardRefetching={hardRefetching}
-        hardRefetch={hardRefetch}
+      <CellWrapper
+        {...wrapperProps}
         alignItems="flex-start"
         fontFamily="inconsolata, monospace"
         spacing={0}
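The wrapperProps object introduced above is typed with Parameters<typeof CellWrapper>[0], i.e. the first parameter of the component function, so the shared props bundle stays in sync with the component's signature. A minimal sketch of the same trick with a hypothetical component (names are illustrative, not from this repo):

import type { ReactElement } from "react";

// Hypothetical component standing in for CellWrapper.
function Panel(props: { title: string; loading: boolean }): ReactElement | null {
  return null; // rendering omitted in this sketch
}

// Derive the props type from the component itself; adding a required prop to Panel
// makes this object fail type-checking until it is updated too.
const panelProps: Parameters<typeof Panel>[0] = { title: "Output", loading: false };
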
@@ -111,8 +116,13 @@ export default function OutputCell({
           ? response.receivedAt.getTime()
           : Date.now();
         if (response.requestedAt) {
-          numWaitingMessages = Math.floor(
-            (relativeWaitingTime - response.requestedAt.getTime()) / WAITING_MESSAGE_INTERVAL,
+          numWaitingMessages = Math.min(
+            Math.floor(
+              (relativeWaitingTime - response.requestedAt.getTime()) / WAITING_MESSAGE_INTERVAL,
+            ),
+            // Don't try to render more than 15, it'll use too much CPU and
+            // break the page
+            15,
           );
         }
         return (
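For context on the Math.min(..., 15) above: the number of "Waiting for response" placeholder rows is derived from elapsed time and then capped. A standalone sketch of that calculation (the constant value comes from this file; the function name is illustrative):

const WAITING_MESSAGE_INTERVAL = 20000; // ms between placeholder "Waiting for response" rows

function countWaitingMessages(requestedAt: Date, receivedAt?: Date): number {
  // Measure waiting time up to the response, or up to now if it hasn't arrived yet.
  const waitedMs = (receivedAt?.getTime() ?? Date.now()) - requestedAt.getTime();
  // Cap at 15 rows so a very long-running request can't flood the cell with DOM nodes.
  return Math.min(Math.floor(waitedMs / WAITING_MESSAGE_INTERVAL), 15);
}
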
@@ -124,18 +134,23 @@ export default function OutputCell({
             Array.from({ length: numWaitingMessages }, (_, i) => (
               <ResponseLog
                 key={`waiting-${i}`}
-                // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
-                time={new Date(response.requestedAt!.getTime() + i * WAITING_MESSAGE_INTERVAL)}
-                title="Waiting for response"
+                time={
+                  new Date(
+                    (response.requestedAt?.getTime?.() ?? 0) +
+                      (i + 1) * WAITING_MESSAGE_INTERVAL,
+                  )
+                }
+                title="Waiting for response..."
               />
             ))}
           {response.receivedAt && (
             <ResponseLog
               time={response.receivedAt}
               title="Response received from API"
-              message={`statusCode: ${response.statusCode ?? ""}\n ${
-                response.errorMessage ?? ""
-              }`}
+              message={[
+                response.statusCode ? `Status: ${response.statusCode}\n` : "",
+                response.errorMessage ?? "",
+              ].join("")}
             />
           )}
         </Fragment>
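The message prop above is now assembled from an array of optional parts, so a missing status code or error message contributes nothing instead of leaving a dangling label. The same pattern in isolation (function name is illustrative, not from this repo):

function formatResponseMessage(statusCode?: number, errorMessage?: string | null): string {
  return [
    statusCode ? `Status: ${statusCode}\n` : "", // omit the line entirely when there is no status code
    errorMessage ?? "",
  ].join("");
}

// e.g. formatResponseMessage(429, "Rate limited") === "Status: 429\nRate limited"
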
@@ -144,61 +159,38 @@ export default function OutputCell({
         {mostRecentResponse?.retryTime && (
           <RetryCountdown retryTime={mostRecentResponse.retryTime} />
         )}
-      </CellContent>
+      </CellWrapper>
     );
 
-  const normalizedOutput = mostRecentResponse?.output
-    ? provider.normalizeOutput(mostRecentResponse?.output)
+  const normalizedOutput = mostRecentResponse?.respPayload
+    ? provider.normalizeOutput(mostRecentResponse?.respPayload)
     : streamedMessage
     ? provider.normalizeOutput(streamedMessage)
     : null;
 
-  if (mostRecentResponse?.output && normalizedOutput?.type === "json") {
+  if (mostRecentResponse?.respPayload && normalizedOutput?.type === "json") {
     return (
-      <VStack
-        w="100%"
-        h="100%"
-        fontSize="xs"
-        flexWrap="wrap"
-        overflowX="hidden"
-        justifyContent="space-between"
-      >
-        <CellContent
-          hardRefetching={hardRefetching}
-          hardRefetch={hardRefetch}
-          w="full"
-          flex={1}
-          spacing={0}
+      <CellWrapper {...wrapperProps}>
-          <SyntaxHighlighter
-            customStyle={{ overflowX: "unset", width: "100%", flex: 1 }}
-            language="json"
-            style={docco}
-            lineProps={{
-              style: { wordBreak: "break-all", whiteSpace: "pre-wrap" },
-            }}
-            wrapLines
-          >
+        <SyntaxHighlighter
+          customStyle={{ overflowX: "unset", width: "100%", flex: 1 }}
+          language="json"
+          style={docco}
+          lineProps={{
+            style: { wordBreak: "break-all", whiteSpace: "pre-wrap" },
+          }}
+          wrapLines
+        >
-            {stringify(normalizedOutput.value, { maxLength: 40 })}
-          </SyntaxHighlighter>
-        </CellContent>
-        <OutputStats modelResponse={mostRecentResponse} scenario={scenario} />
-      </VStack>
+          {stringify(normalizedOutput.value, { maxLength: 40 })}
+        </SyntaxHighlighter>
+      </CellWrapper>
     );
   }
 
   const contentToDisplay = (normalizedOutput?.type === "text" && normalizedOutput.value) || "";
 
   return (
-    <VStack w="100%" h="100%" justifyContent="space-between" whiteSpace="pre-wrap">
-      <VStack w="full" alignItems="flex-start" spacing={0}>
-        <CellContent hardRefetching={hardRefetching} hardRefetch={hardRefetch}>
-          <Text>{contentToDisplay}</Text>
-        </CellContent>
-      </VStack>
-      {mostRecentResponse?.output && (
-        <OutputStats modelResponse={mostRecentResponse} scenario={scenario} />
-      )}
-    </VStack>
+    <CellWrapper {...wrapperProps}>
+      <Text whiteSpace="pre-wrap">{contentToDisplay}</Text>
+    </CellWrapper>
   );
 }