Compare commits
73 Commits
no-model
...
world-cham
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
156f248c3a | ||
|
|
65a76cddc5 | ||
|
|
c88266bcd4 | ||
|
|
1bf9554eca | ||
|
|
1fb428ef4a | ||
|
|
6316eaae6d | ||
|
|
8513924ea5 | ||
|
|
51d64baae9 | ||
|
|
26b6fa4f0c | ||
|
|
807665fdc1 | ||
|
|
d6597d2c8a | ||
|
|
566d67bf48 | ||
|
|
d4fb8b689a | ||
|
|
98b231c8bd | ||
|
|
45afb1f1f4 | ||
|
|
2bffb03766 | ||
|
|
223b990005 | ||
|
|
fa61c9c472 | ||
|
|
1309a6ec5d | ||
|
|
17a6fd31a5 | ||
|
|
e1cbeccb90 | ||
|
|
d6b97b29f7 | ||
|
|
09140f8b5f | ||
|
|
9952dd93d8 | ||
|
|
e0b457c6c5 | ||
|
|
0c37506975 | ||
|
|
2b2e0ab8ee | ||
|
|
3dbb06ec00 | ||
|
|
85d42a014b | ||
|
|
7d1ded3b18 | ||
|
|
b00f6dd04b | ||
|
|
2e395e4d39 | ||
|
|
4b06d05908 | ||
|
|
aabf355b81 | ||
|
|
61e5f0775d | ||
|
|
cc1d1178da | ||
|
|
7466db63df | ||
|
|
79a0b03bf8 | ||
|
|
6fb7a82d72 | ||
|
|
4ea30a3ba3 | ||
|
|
52d1d5c7ee | ||
|
|
46036a44d2 | ||
|
|
3753fe5c16 | ||
|
|
213a00a8e6 | ||
|
|
af9943eefc | ||
|
|
741128e0f4 | ||
|
|
aff14539d8 | ||
|
|
1af81a50a9 | ||
|
|
7e1fbb3767 | ||
|
|
a5d972005e | ||
|
|
a180b5bef2 | ||
|
|
55c697223e | ||
|
|
9978075867 | ||
|
|
847753c32b | ||
|
|
372c2512c9 | ||
|
|
332a2101c0 | ||
|
|
1822fe198e | ||
|
|
f06e1db3db | ||
|
|
ded6678e97 | ||
|
|
9314a86857 | ||
|
|
54dcb4a567 | ||
|
|
2c8c8d07cf | ||
|
|
e885bdd365 | ||
|
|
86dc36a656 | ||
|
|
55c077d604 | ||
|
|
e598e454d0 | ||
|
|
6e3f90cd2f | ||
|
|
eec894e101 | ||
|
|
f797fc3fa4 | ||
|
|
335dc0357f | ||
|
|
e6e2c706c2 | ||
|
|
7d2166b305 | ||
|
|
2c4ba6eb9b |
@@ -17,6 +17,9 @@ DATABASE_URL="postgresql://postgres:postgres@localhost:5432/openpipe?schema=publ
|
|||||||
# https://help.openai.com/en/articles/4936850-where-do-i-find-my-secret-api-key
|
# https://help.openai.com/en/articles/4936850-where-do-i-find-my-secret-api-key
|
||||||
OPENAI_API_KEY=""
|
OPENAI_API_KEY=""
|
||||||
|
|
||||||
|
# Replicate API token. Create a token here: https://replicate.com/account/api-tokens
|
||||||
|
REPLICATE_API_TOKEN=""
|
||||||
|
|
||||||
NEXT_PUBLIC_SOCKET_URL="http://localhost:3318"
|
NEXT_PUBLIC_SOCKET_URL="http://localhost:3318"
|
||||||
|
|
||||||
# Next Auth
|
# Next Auth
|
||||||
|
|||||||
@@ -37,6 +37,7 @@ const config = {
|
|||||||
"warn",
|
"warn",
|
||||||
{ vars: "all", varsIgnorePattern: "^_", args: "after-used", argsIgnorePattern: "^_" },
|
{ vars: "all", varsIgnorePattern: "^_", args: "after-used", argsIgnorePattern: "^_" },
|
||||||
],
|
],
|
||||||
|
"react/no-unescaped-entities": "off",
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -1,2 +1,2 @@
|
|||||||
src/codegen/openai.schema.json
|
*.schema.json
|
||||||
pnpm-lock.yaml
|
pnpm-lock.yaml
|
||||||
5
.vscode/settings.json
vendored
5
.vscode/settings.json
vendored
@@ -1,6 +1,3 @@
|
|||||||
{
|
{
|
||||||
"eslint.format.enable": true,
|
"eslint.format.enable": true
|
||||||
"editor.codeActionsOnSave": {
|
|
||||||
"source.fixAll.eslint": true
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
5
@types/nextjs-routes.d.ts
vendored
5
@types/nextjs-routes.d.ts
vendored
@@ -13,10 +13,13 @@ declare module "nextjs-routes" {
|
|||||||
export type Route =
|
export type Route =
|
||||||
| StaticRoute<"/account/signin">
|
| StaticRoute<"/account/signin">
|
||||||
| DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
|
| DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
|
||||||
|
| StaticRoute<"/api/experiments/og-image">
|
||||||
| DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>
|
| DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>
|
||||||
| DynamicRoute<"/experiments/[id]", { "id": string }>
|
| DynamicRoute<"/experiments/[id]", { "id": string }>
|
||||||
| StaticRoute<"/experiments">
|
| StaticRoute<"/experiments">
|
||||||
| StaticRoute<"/">;
|
| StaticRoute<"/">
|
||||||
|
| StaticRoute<"/world-champs">
|
||||||
|
| StaticRoute<"/world-champs/signup">;
|
||||||
|
|
||||||
interface StaticRoute<Pathname> {
|
interface StaticRoute<Pathname> {
|
||||||
pathname: Pathname;
|
pathname: Pathname;
|
||||||
|
|||||||
@@ -19,7 +19,6 @@ FROM base as builder
|
|||||||
|
|
||||||
# Include all NEXT_PUBLIC_* env vars here
|
# Include all NEXT_PUBLIC_* env vars here
|
||||||
ARG NEXT_PUBLIC_POSTHOG_KEY
|
ARG NEXT_PUBLIC_POSTHOG_KEY
|
||||||
ARG NEXT_PUBLIC_IS_PUBLIC_PLAYGROUND
|
|
||||||
ARG NEXT_PUBLIC_SOCKET_URL
|
ARG NEXT_PUBLIC_SOCKET_URL
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|||||||
12
README.md
12
README.md
@@ -8,10 +8,10 @@ OpenPipe is a flexible playground for comparing and optimizing LLM prompts. It l
|
|||||||
|
|
||||||
These are simple experiments users have created that show how OpenPipe works.
|
These are simple experiments users have created that show how OpenPipe works.
|
||||||
|
|
||||||
- [Country Capitals](https://openpipe.ai/experiments/11111111-1111-1111-1111-111111111111)
|
- [Country Capitals](https://app.openpipe.ai/experiments/11111111-1111-1111-1111-111111111111)
|
||||||
- [Reddit User Needs](https://openpipe.ai/experiments/22222222-2222-2222-2222-222222222222)
|
- [Reddit User Needs](https://app.openpipe.ai/experiments/22222222-2222-2222-2222-222222222222)
|
||||||
- [OpenAI Function Calls](https://openpipe.ai/experiments/2ebbdcb3-ed51-456e-87dc-91f72eaf3e2b)
|
- [OpenAI Function Calls](https://app.openpipe.ai/experiments/2ebbdcb3-ed51-456e-87dc-91f72eaf3e2b)
|
||||||
- [Activity Classification](https://openpipe.ai/experiments/3950940f-ab6b-4b74-841d-7e9dbc4e4ff8)
|
- [Activity Classification](https://app.openpipe.ai/experiments/3950940f-ab6b-4b74-841d-7e9dbc4e4ff8)
|
||||||
|
|
||||||
<img src="https://github.com/openpipe/openpipe/assets/176426/fc7624c6-5b65-4d4d-82b7-4a816f3e5678" alt="demo" height="400px">
|
<img src="https://github.com/openpipe/openpipe/assets/176426/fc7624c6-5b65-4d4d-82b7-4a816f3e5678" alt="demo" height="400px">
|
||||||
|
|
||||||
@@ -43,7 +43,9 @@ Natively supports [OpenAI function calls](https://openai.com/blog/function-calli
|
|||||||
|
|
||||||
## Supported Models
|
## Supported Models
|
||||||
|
|
||||||
OpenPipe currently supports GPT-3.5 and GPT-4. Wider model support is planned.
|
- All models available through the OpenAI [chat completion API](https://platform.openai.com/docs/guides/gpt/chat-completions-api)
|
||||||
|
- Llama2 [7b chat](https://replicate.com/a16z-infra/llama7b-v2-chat), [13b chat](https://replicate.com/a16z-infra/llama13b-v2-chat), [70b chat](https://replicate.com/replicate/llama70b-v2-chat).
|
||||||
|
- Anthropic's [Claude 1 Instant](https://www.anthropic.com/index/introducing-claude) and [Claude 2](https://www.anthropic.com/index/claude-2)
|
||||||
|
|
||||||
## Running Locally
|
## Running Locally
|
||||||
|
|
||||||
|
|||||||
18
package.json
18
package.json
@@ -12,7 +12,7 @@
|
|||||||
"dev:next": "next dev",
|
"dev:next": "next dev",
|
||||||
"dev:wss": "pnpm tsx --watch src/wss-server.ts",
|
"dev:wss": "pnpm tsx --watch src/wss-server.ts",
|
||||||
"dev:worker": "NODE_ENV='development' pnpm tsx --watch src/server/tasks/worker.ts",
|
"dev:worker": "NODE_ENV='development' pnpm tsx --watch src/server/tasks/worker.ts",
|
||||||
"dev": "concurrently --kill-others 'pnpm dev:next' 'pnpm dev:wss'",
|
"dev": "concurrently --kill-others 'pnpm dev:next' 'pnpm dev:wss' 'pnpm dev:worker'",
|
||||||
"postinstall": "prisma generate",
|
"postinstall": "prisma generate",
|
||||||
"lint": "next lint",
|
"lint": "next lint",
|
||||||
"start": "next start",
|
"start": "next start",
|
||||||
@@ -21,6 +21,8 @@
|
|||||||
"check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'"
|
"check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@anthropic-ai/sdk": "^0.5.8",
|
||||||
|
"@apidevtools/json-schema-ref-parser": "^10.1.0",
|
||||||
"@babel/preset-typescript": "^7.22.5",
|
"@babel/preset-typescript": "^7.22.5",
|
||||||
"@babel/standalone": "^7.22.9",
|
"@babel/standalone": "^7.22.9",
|
||||||
"@chakra-ui/next-js": "^2.1.4",
|
"@chakra-ui/next-js": "^2.1.4",
|
||||||
@@ -39,6 +41,8 @@
|
|||||||
"@trpc/next": "^10.26.0",
|
"@trpc/next": "^10.26.0",
|
||||||
"@trpc/react-query": "^10.26.0",
|
"@trpc/react-query": "^10.26.0",
|
||||||
"@trpc/server": "^10.26.0",
|
"@trpc/server": "^10.26.0",
|
||||||
|
"@vercel/og": "^0.5.9",
|
||||||
|
"ast-types": "^0.14.2",
|
||||||
"chroma-js": "^2.4.2",
|
"chroma-js": "^2.4.2",
|
||||||
"concurrently": "^8.2.0",
|
"concurrently": "^8.2.0",
|
||||||
"cors": "^2.8.5",
|
"cors": "^2.8.5",
|
||||||
@@ -51,10 +55,13 @@
|
|||||||
"graphile-worker": "^0.13.0",
|
"graphile-worker": "^0.13.0",
|
||||||
"immer": "^10.0.2",
|
"immer": "^10.0.2",
|
||||||
"isolated-vm": "^4.5.0",
|
"isolated-vm": "^4.5.0",
|
||||||
|
"json-schema-to-typescript": "^13.0.2",
|
||||||
"json-stringify-pretty-compact": "^4.0.0",
|
"json-stringify-pretty-compact": "^4.0.0",
|
||||||
|
"jsonschema": "^1.4.1",
|
||||||
"lodash-es": "^4.17.21",
|
"lodash-es": "^4.17.21",
|
||||||
"next": "^13.4.2",
|
"next": "^13.4.2",
|
||||||
"next-auth": "^4.22.1",
|
"next-auth": "^4.22.1",
|
||||||
|
"next-query-params": "^4.2.3",
|
||||||
"nextjs-routes": "^2.0.1",
|
"nextjs-routes": "^2.0.1",
|
||||||
"openai": "4.0.0-beta.2",
|
"openai": "4.0.0-beta.2",
|
||||||
"pluralize": "^8.0.0",
|
"pluralize": "^8.0.0",
|
||||||
@@ -68,10 +75,16 @@
|
|||||||
"react-select": "^5.7.4",
|
"react-select": "^5.7.4",
|
||||||
"react-syntax-highlighter": "^15.5.0",
|
"react-syntax-highlighter": "^15.5.0",
|
||||||
"react-textarea-autosize": "^8.5.0",
|
"react-textarea-autosize": "^8.5.0",
|
||||||
|
"recast": "^0.23.3",
|
||||||
|
"replicate": "^0.12.3",
|
||||||
"socket.io": "^4.7.1",
|
"socket.io": "^4.7.1",
|
||||||
"socket.io-client": "^4.7.1",
|
"socket.io-client": "^4.7.1",
|
||||||
"superjson": "1.12.2",
|
"superjson": "1.12.2",
|
||||||
"tsx": "^3.12.7",
|
"tsx": "^3.12.7",
|
||||||
|
"type-fest": "^4.0.0",
|
||||||
|
"use-query-params": "^2.2.1",
|
||||||
|
"uuid": "^9.0.0",
|
||||||
|
"vite-tsconfig-paths": "^4.2.0",
|
||||||
"zod": "^3.21.4",
|
"zod": "^3.21.4",
|
||||||
"zustand": "^4.3.9"
|
"zustand": "^4.3.9"
|
||||||
},
|
},
|
||||||
@@ -83,6 +96,7 @@
|
|||||||
"@types/cors": "^2.8.13",
|
"@types/cors": "^2.8.13",
|
||||||
"@types/eslint": "^8.37.0",
|
"@types/eslint": "^8.37.0",
|
||||||
"@types/express": "^4.17.17",
|
"@types/express": "^4.17.17",
|
||||||
|
"@types/json-schema": "^7.0.12",
|
||||||
"@types/lodash-es": "^4.17.8",
|
"@types/lodash-es": "^4.17.8",
|
||||||
"@types/node": "^18.16.0",
|
"@types/node": "^18.16.0",
|
||||||
"@types/pluralize": "^0.0.30",
|
"@types/pluralize": "^0.0.30",
|
||||||
@@ -90,8 +104,10 @@
|
|||||||
"@types/react": "^18.2.6",
|
"@types/react": "^18.2.6",
|
||||||
"@types/react-dom": "^18.2.4",
|
"@types/react-dom": "^18.2.4",
|
||||||
"@types/react-syntax-highlighter": "^15.5.7",
|
"@types/react-syntax-highlighter": "^15.5.7",
|
||||||
|
"@types/uuid": "^9.0.2",
|
||||||
"@typescript-eslint/eslint-plugin": "^5.59.6",
|
"@typescript-eslint/eslint-plugin": "^5.59.6",
|
||||||
"@typescript-eslint/parser": "^5.59.6",
|
"@typescript-eslint/parser": "^5.59.6",
|
||||||
|
"csv-parse": "^5.4.0",
|
||||||
"eslint": "^8.40.0",
|
"eslint": "^8.40.0",
|
||||||
"eslint-config-next": "^13.4.2",
|
"eslint-config-next": "^13.4.2",
|
||||||
"eslint-plugin-unused-imports": "^2.0.0",
|
"eslint-plugin-unused-imports": "^2.0.0",
|
||||||
|
|||||||
616
pnpm-lock.yaml
generated
616
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
84
prisma/datasets/validated_tweets.csv
Normal file
84
prisma/datasets/validated_tweets.csv
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
Text,sentiment,emotion
|
||||||
|
@dell your customer service is horrible especially agent syedfaisal who has made this experience of purchasing a new computer downright awful and I’ll reconsider ever buying a Dell in the future @DellTech,negative,anger
|
||||||
|
@zacokalo @Dell @DellCares @Dell give the man what he paid for!,neutral,anger
|
||||||
|
"COOKING STREAM DAY!!! Ty to @Alienware for sponsoring this stream! I’ll be making a bunch of Japanese Alien themed foods hehe
|
||||||
|
|
||||||
|
Come check it out! https://t.co/m06tJQ06zk
|
||||||
|
|
||||||
|
#alienwarepartner #intelgaming @Dell @IntelGaming https://t.co/qOdQX2E8VD",positive,joy
|
||||||
|
@emijuju_ @Alienware @Dell @intel Beautiful 😍❤️😻,positive,joy
|
||||||
|
"What's your biggest data management challenge? • Cloud complexity? • Lengthy tech refresh cycles? • Capital budget constraints? Solve your challenges with as-a-Storage. Get simplicity, agility & control with @Dell #APEX. https://t.co/mCblMtH931 https://t.co/eepKNZ4Ai3",neutral,optimism
|
||||||
|
"This week we were at the ""Top Gun"" themed @Dell Product Expo. Eddie Muñoz met Maverick look-alike, California Tom Cruise (Jerome LeBlanc)!
|
||||||
|
|
||||||
|
""I feel the need, the need for speed."" - Maverick
|
||||||
|
#topgun #topgunmaverick #dell #delltechnologies #lockncharge https://t.co/QHYH2EbMjq",positive,joy
|
||||||
|
"Itsss been more than a week...i m following up with dell for troubleshootings...my https://t.co/lWhg2YKhQa suffering so as my hard earned money...hightly disappointed...contd..
|
||||||
|
@DellCares @Dell",negative,sadness
|
||||||
|
"@ashu_k7 @Dell Pathetic!!!!! I Dont mind taking legal action, this is deficency of service for which the customer is nt getting help..",negative,anger
|
||||||
|
@ashu_k7 @Dell Making life unhappy is the new tag line of #Dell,negative,sadness
|
||||||
|
"@Dell If you are buying a Dell, make sure you are making your life hell.
|
||||||
|
Better buy other laptops. If you wanted to opt for Dell better opt for garbage on the streets.",negative,anger
|
||||||
|
"MY DESK'S FINAL FORM? Seriously, I'm finally happy with my monitor setup here... and I'll keep this setup whenever I move... FOREVER. What do you think?
|
||||||
|
https://t.co/WJZ2JXtOnX
|
||||||
|
@Alienware @Dell cheers. https://t.co/6Whhldfpv0",positive,joy
|
||||||
|
"@Dell Dell Alienware computer has had software problems with SupportAssist since purchase. Dell, despite paying for Premium Support, has never fixed issues. Latest solution was to erase everything and reload....SupportAssist still doesn't work.",negative,anger
|
||||||
|
"HUGE congratulations to Startup Battle 3.0 winner ➡️ @Ox_Fulfillment x @cyborgcharu for being featured in @BusinessInsider & @Dell showcasing the journey at Ox! 🚀🚀🚀
|
||||||
|
|
||||||
|
We love to see our portfolio companies continuing to BUILD SOMETHING FROM NOTHING! 🔥 https://t.co/awBkn5ippB",positive,joy
|
||||||
|
@Dell happy Friday!,positive,joy
|
||||||
|
"@intel Core i5 1135G7 - 4732 points
|
||||||
|
@intel Core i5 1235 - 6619 points
|
||||||
|
@Dell Latitude 5420 x 5430.
|
||||||
|
Cinebench R23. Good job Intel!",positive,joy
|
||||||
|
@Dell india we purchased 52 docking station and we have around 100 users using dell laptop as well as dell monitor now they are refusing to replace my faulty product and disconnecting my every call....,negative,anger
|
||||||
|
"It's another year ans another day But cant fill it in yet the child hood dreams.
|
||||||
|
It's my birthdy today. Can anyone of you guys bless me with a simplest gaming oc that can run
|
||||||
|
@DOTA2 ?
|
||||||
|
@Dell @HP @VastGG @Acer @Alienware @Lenovo @toshiba @IBM @Fujitsu_Global @NEC https://t.co/69G8tL9sN8",neutral,joy
|
||||||
|
"@idoccor @Dell That's always the decision—wait, or, look elsewhere. In this case, I think I unfortunately need to wait since there are only two monitors with these specs and I don't like the other one 😂",negative,sadness
|
||||||
|
"@MichaelDell @Dell @DellCares For how long this will continue. It is high time you either fix the problem for good or replace the complete laptop. Spent over 60+ hours with Customer Care teams, which is not helping. Cannot keep going on like this.",negative,anger
|
||||||
|
"@Dell @DellCares but no, not really",neutral,sadness
|
||||||
|
"Business innovation requires insight, agility and efficiency. How do you get there? RP PRO, LLC recommends starting by proactively managing IT infrastructure with #OpenManage Systems from @Dell. https://t.co/fBcK1lfFMu https://t.co/xWHLkkHCjn",neutral,optimism
|
||||||
|
@Dell Yessirrrrr #NationalCoffeeDay,positive,joy
|
||||||
|
"New blog post from @Dell shared on https://t.co/EgfPChB8AT
|
||||||
|
|
||||||
|
Re-routing Our Connected and Autonomous Future https://t.co/AW8EHQrbd6
|
||||||
|
|
||||||
|
#future #futuretech #techinnovation https://t.co/koX8stKPsr",neutral,joy
|
||||||
|
"In a free-market economy, the folks @IronMountain can set prices as they see fit. Their customers are also free to find better prices at competitors like @Dell
|
||||||
|
@H3CGlobal @HPE
|
||||||
|
https://t.co/reZ56DNTBI",neutral,optimism
|
||||||
|
"Delighted to chat with many of our partners here in person at @Intel Innovation! @Dell, @Lenovo, @Supermicro_SMCI, @QuantaQCT #IntelON https://t.co/BxIeGW8deN",positive,joy
|
||||||
|
"A special gracias to our Startup Chica San Antonio 2022 sponsors @eBay, @jcpenney, @Barbie, @HEB, @Dell, @Honda, @SouthsideSATX💜✨ https://t.co/lZ6WWkziHl",positive,joy
|
||||||
|
"When your team decides to start supporting developers, your #ops must change too. More from @cote and @Dell Developer Community Manager @barton808: https://t.co/W6f1oMiTgV",neutral,optimism
|
||||||
|
@EmDStowers @LASERGIANT1 @ohwormongod @Ludovician_Vega @Dell our boy snitchin,neutral,anger
|
||||||
|
A 1st place dmi:Design Value Award goes to @Dell for a packaging modernization initiative that helped them get closer to their corporate Moonshot Sustainability Goal of 100% recycled or renewable packaging by 2030. More at https://t.co/dnhZWWLCQC #designvalue #DVA22,positive,optimism
|
||||||
|
Reducing deployment and maintenance complexity is the goal behind @dell and @WindRiver's new collaboration. https://t.co/2PxQgPuHUU,positive,optimism
|
||||||
|
@jaserhunter @Dell Love the sales pitch lol,positive,joy
|
||||||
|
@Dell india we purchased 52 docking station and we have around 100 users using dell laptop as well as dell monitor now they are refusing to replace my faulty product and disconnecting my every call....,negative,anger
|
||||||
|
@ashu_k7 @Dell One more example.. their technical support is also worse. https://t.co/20atSgI4fg,negative,anger
|
||||||
|
*angry screeches about @Dell proprietary MBR windows 8.1 partitions not being able to save as an img in clonezilla *,negative,anger
|
||||||
|
@socialitebooks @BBYC_Gamers @Dell @Alienware @BestBuyCanada @intelcanada Congratulations!!!,positive,joy
|
||||||
|
"Thank you to the @dell team for coming out to volunteer today! We truly appreciate your hard work and look forward to seeing you again soon!
|
||||||
|
|
||||||
|
If you and your team are interested in helping out at the UMLAUF, visit our website for more information: https://t.co/lVfsZT2ogS https://t.co/eLz0FY0y4M",positive,joy
|
||||||
|
"@TheCaramelGamer @intel @bravadogaming @Intel_Africa @Dell @DellTech @DellTechMEA @Alienware @IntelUK we love to see it.
|
||||||
|
|
||||||
|
Also also actually actually whoever did that artwork? 🔥🔥🔥 am a fan.",positive,joy
|
||||||
|
"LOVING MY DELL 2 IN 1 LAPTOP
|
||||||
|
YAYY 🥳🥳
|
||||||
|
@Dell #DellInspiron #DellLaptop https://t.co/vib96jf3tC",positive,joy
|
||||||
|
@Azure @OracleItalia @AWS_Italy @lenovoitalia @Dell discussing the future of #HPC during the #hpcroundtable22 in Turin today #highperformancecomputing https://t.co/jJ1WqBulPF,neutral,joy
|
||||||
|
Attracting talent @AmericanChamber. @marg_cola @Dell speaks of quality of life connectivity and the Opportunity for development being so crucial. Housing availability is now impacting on decision making for potential candidates. #WhyCork,positive,optimism
|
||||||
|
.@Dell partners with @WindRiver on modular cloud-native telecommunications infrastructure https://t.co/4SWATspwCP @SiliconANGLE @Mike_Wheatley @holgermu @constellationr,neutral,joy
|
||||||
|
@Dell Not buy Dell Inspiron laptop,neutral,sadness
|
||||||
|
"@dell #delltechforum reminding us IDC have predicted that by 2024, 50% of everything we consume in technology will be as a service https://t.co/3UBiZJX0LE",neutral,optimism
|
||||||
|
@RachMurph @HETTShow @Dell Thank you for coming! Great evening,positive,joy
|
||||||
|
Congratulations to Jason M of Moncton NB on winning a @Dell @Alienware m15 R7 15.6″ gaming laptop from @BestBuyCanada and @intelcanada's gaming days #contest on the blog. Visit https://t.co/VryaY5Rvv9 to learn about tech and for chances to win new tech. https://t.co/T6n0dzF6oL,positive,joy
|
||||||
|
@MattVisiwig @Dell Sour taste for sure 😶 But don't let ego distract you from what you really want to buy 😁,neutral,optimism
|
||||||
|
"Massive thank you goes to sponsors @HendersonLoggie @lindsaysnews @Dell @unity, all of our fantastic judges and mentors and the team at @EGX and @ExCeLLondon.
|
||||||
|
|
||||||
|
Big congratulations also to all of our other @AbertayDare teams - an amazing year! #Dare2022 https://t.co/jYe4agO7lW",positive,joy
|
||||||
|
"@timetcetera @rahaug Nah, I just need @Dell to start paying me comissions 😂",neutral,joy
|
||||||
|
"""Whether you’re an engineer, a designer, or work in supply chain management or sales, there are always opportunities to think about sustainability and how you can do things more efficiently."" 👏 — Oliver Campbell, Director of Packaging Engineering, @Dell https://t.co/vUJLTWNFwP https://t.co/GJWAzGfAxJ",positive,optimism
|
||||||
|
"Hi, my name is @listerepvp and I support @Dell, always.",positive,joy
|
||||||
|
@@ -0,0 +1,17 @@
|
|||||||
|
-- Add new columns allowing NULL values
|
||||||
|
ALTER TABLE "PromptVariant"
|
||||||
|
ADD COLUMN "constructFnVersion" INTEGER,
|
||||||
|
ADD COLUMN "modelProvider" TEXT;
|
||||||
|
|
||||||
|
-- Update existing records to have the default values
|
||||||
|
UPDATE "PromptVariant"
|
||||||
|
SET "constructFnVersion" = 1,
|
||||||
|
"modelProvider" = 'openai/ChatCompletion'
|
||||||
|
WHERE "constructFnVersion" IS NULL OR "modelProvider" IS NULL;
|
||||||
|
|
||||||
|
-- Alter table to set NOT NULL constraint
|
||||||
|
ALTER TABLE "PromptVariant"
|
||||||
|
ALTER COLUMN "constructFnVersion" SET NOT NULL,
|
||||||
|
ALTER COLUMN "modelProvider" SET NOT NULL;
|
||||||
|
|
||||||
|
ALTER TABLE "ScenarioVariantCell" ADD COLUMN "prompt" JSONB;
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
/*
|
||||||
|
Warnings:
|
||||||
|
|
||||||
|
- You are about to drop the column `streamingChannel` on the `ScenarioVariantCell` table. All the data in the column will be lost.
|
||||||
|
|
||||||
|
*/
|
||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "ScenarioVariantCell" DROP COLUMN "streamingChannel";
|
||||||
@@ -0,0 +1,52 @@
|
|||||||
|
-- DropForeignKey
|
||||||
|
ALTER TABLE "ModelOutput" DROP CONSTRAINT "ModelOutput_scenarioVariantCellId_fkey";
|
||||||
|
|
||||||
|
-- DropForeignKey
|
||||||
|
ALTER TABLE "OutputEvaluation" DROP CONSTRAINT "OutputEvaluation_modelOutputId_fkey";
|
||||||
|
|
||||||
|
-- DropIndex
|
||||||
|
DROP INDEX "OutputEvaluation_modelOutputId_evaluationId_key";
|
||||||
|
|
||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "OutputEvaluation" RENAME COLUMN "modelOutputId" TO "modelResponseId";
|
||||||
|
|
||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "ScenarioVariantCell" DROP COLUMN "retryTime",
|
||||||
|
DROP COLUMN "statusCode",
|
||||||
|
ADD COLUMN "jobQueuedAt" TIMESTAMP(3),
|
||||||
|
ADD COLUMN "jobStartedAt" TIMESTAMP(3);
|
||||||
|
|
||||||
|
ALTER TABLE "ModelOutput" RENAME TO "ModelResponse";
|
||||||
|
|
||||||
|
ALTER TABLE "ModelResponse"
|
||||||
|
ADD COLUMN "requestedAt" TIMESTAMP(3),
|
||||||
|
ADD COLUMN "receivedAt" TIMESTAMP(3),
|
||||||
|
ADD COLUMN "statusCode" INTEGER,
|
||||||
|
ADD COLUMN "errorMessage" TEXT,
|
||||||
|
ADD COLUMN "retryTime" TIMESTAMP(3),
|
||||||
|
ADD COLUMN "outdated" BOOLEAN NOT NULL DEFAULT false;
|
||||||
|
|
||||||
|
-- 3. Remove the unnecessary column
|
||||||
|
ALTER TABLE "ModelResponse"
|
||||||
|
DROP COLUMN "timeToComplete";
|
||||||
|
|
||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "ModelResponse" RENAME CONSTRAINT "ModelOutput_pkey" TO "ModelResponse_pkey";
|
||||||
|
ALTER TABLE "ModelResponse" ALTER COLUMN "output" DROP NOT NULL;
|
||||||
|
|
||||||
|
-- DropIndex
|
||||||
|
DROP INDEX "ModelOutput_scenarioVariantCellId_key";
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "ModelResponse" ADD CONSTRAINT "ModelResponse_scenarioVariantCellId_fkey" FOREIGN KEY ("scenarioVariantCellId") REFERENCES "ScenarioVariantCell"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
|
|
||||||
|
-- RenameIndex
|
||||||
|
ALTER INDEX "ModelOutput_inputHash_idx" RENAME TO "ModelResponse_inputHash_idx";
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "OutputEvaluation_modelResponseId_evaluationId_key" ON "OutputEvaluation"("modelResponseId", "evaluationId");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "OutputEvaluation" ADD CONSTRAINT "OutputEvaluation_modelResponseId_fkey" FOREIGN KEY ("modelResponseId") REFERENCES "ModelResponse"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
|
|
||||||
|
|
||||||
@@ -0,0 +1,16 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "WorldChampEntrant" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"userId" UUID NOT NULL,
|
||||||
|
"approved" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "WorldChampEntrant_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "WorldChampEntrant_userId_key" ON "WorldChampEntrant"("userId");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "WorldChampEntrant" ADD CONSTRAINT "WorldChampEntrant_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "User" ADD COLUMN "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
ADD COLUMN "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP;
|
||||||
@@ -22,10 +22,10 @@ model Experiment {
|
|||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
updatedAt DateTime @updatedAt
|
updatedAt DateTime @updatedAt
|
||||||
|
|
||||||
TemplateVariable TemplateVariable[]
|
templateVariables TemplateVariable[]
|
||||||
PromptVariant PromptVariant[]
|
promptVariants PromptVariant[]
|
||||||
TestScenario TestScenario[]
|
testScenarios TestScenario[]
|
||||||
Evaluation Evaluation[]
|
evaluations Evaluation[]
|
||||||
}
|
}
|
||||||
|
|
||||||
model PromptVariant {
|
model PromptVariant {
|
||||||
@@ -33,7 +33,9 @@ model PromptVariant {
|
|||||||
|
|
||||||
label String
|
label String
|
||||||
constructFn String
|
constructFn String
|
||||||
|
constructFnVersion Int
|
||||||
model String
|
model String
|
||||||
|
modelProvider String
|
||||||
|
|
||||||
uiId String @default(uuid()) @db.Uuid
|
uiId String @default(uuid()) @db.Uuid
|
||||||
visible Boolean @default(true)
|
visible Boolean @default(true)
|
||||||
@@ -88,16 +90,15 @@ enum CellRetrievalStatus {
|
|||||||
model ScenarioVariantCell {
|
model ScenarioVariantCell {
|
||||||
id String @id @default(uuid()) @db.Uuid
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
|
||||||
statusCode Int?
|
|
||||||
errorMessage String?
|
|
||||||
retryTime DateTime?
|
|
||||||
streamingChannel String?
|
|
||||||
retrievalStatus CellRetrievalStatus @default(COMPLETE)
|
retrievalStatus CellRetrievalStatus @default(COMPLETE)
|
||||||
|
jobQueuedAt DateTime?
|
||||||
modelOutput ModelOutput?
|
jobStartedAt DateTime?
|
||||||
|
modelResponses ModelResponse[]
|
||||||
|
errorMessage String? // Contains errors that occurred independently of model responses
|
||||||
|
|
||||||
promptVariantId String @db.Uuid
|
promptVariantId String @db.Uuid
|
||||||
promptVariant PromptVariant @relation(fields: [promptVariantId], references: [id], onDelete: Cascade)
|
promptVariant PromptVariant @relation(fields: [promptVariantId], references: [id], onDelete: Cascade)
|
||||||
|
prompt Json?
|
||||||
|
|
||||||
testScenarioId String @db.Uuid
|
testScenarioId String @db.Uuid
|
||||||
testScenario TestScenario @relation(fields: [testScenarioId], references: [id], onDelete: Cascade)
|
testScenario TestScenario @relation(fields: [testScenarioId], references: [id], onDelete: Cascade)
|
||||||
@@ -108,24 +109,28 @@ model ScenarioVariantCell {
|
|||||||
@@unique([promptVariantId, testScenarioId])
|
@@unique([promptVariantId, testScenarioId])
|
||||||
}
|
}
|
||||||
|
|
||||||
model ModelOutput {
|
model ModelResponse {
|
||||||
id String @id @default(uuid()) @db.Uuid
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
|
||||||
inputHash String
|
inputHash String
|
||||||
output Json
|
requestedAt DateTime?
|
||||||
timeToComplete Int @default(0)
|
receivedAt DateTime?
|
||||||
|
output Json?
|
||||||
cost Float?
|
cost Float?
|
||||||
promptTokens Int?
|
promptTokens Int?
|
||||||
completionTokens Int?
|
completionTokens Int?
|
||||||
|
statusCode Int?
|
||||||
|
errorMessage String?
|
||||||
|
retryTime DateTime?
|
||||||
|
outdated Boolean @default(false)
|
||||||
|
|
||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
updatedAt DateTime @updatedAt
|
updatedAt DateTime @updatedAt
|
||||||
|
|
||||||
scenarioVariantCellId String @db.Uuid
|
scenarioVariantCellId String @db.Uuid
|
||||||
scenarioVariantCell ScenarioVariantCell @relation(fields: [scenarioVariantCellId], references: [id], onDelete: Cascade)
|
scenarioVariantCell ScenarioVariantCell @relation(fields: [scenarioVariantCellId], references: [id], onDelete: Cascade)
|
||||||
outputEvaluation OutputEvaluation[]
|
outputEvaluations OutputEvaluation[]
|
||||||
|
|
||||||
@@unique([scenarioVariantCellId])
|
|
||||||
@@index([inputHash])
|
@@index([inputHash])
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -147,7 +152,7 @@ model Evaluation {
|
|||||||
|
|
||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
updatedAt DateTime @updatedAt
|
updatedAt DateTime @updatedAt
|
||||||
OutputEvaluation OutputEvaluation[]
|
outputEvaluations OutputEvaluation[]
|
||||||
}
|
}
|
||||||
|
|
||||||
model OutputEvaluation {
|
model OutputEvaluation {
|
||||||
@@ -157,8 +162,8 @@ model OutputEvaluation {
|
|||||||
result Float
|
result Float
|
||||||
details String?
|
details String?
|
||||||
|
|
||||||
modelOutputId String @db.Uuid
|
modelResponseId String @db.Uuid
|
||||||
modelOutput ModelOutput @relation(fields: [modelOutputId], references: [id], onDelete: Cascade)
|
modelResponse ModelResponse @relation(fields: [modelResponseId], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
evaluationId String @db.Uuid
|
evaluationId String @db.Uuid
|
||||||
evaluation Evaluation @relation(fields: [evaluationId], references: [id], onDelete: Cascade)
|
evaluation Evaluation @relation(fields: [evaluationId], references: [id], onDelete: Cascade)
|
||||||
@@ -166,7 +171,7 @@ model OutputEvaluation {
|
|||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
updatedAt DateTime @updatedAt
|
updatedAt DateTime @updatedAt
|
||||||
|
|
||||||
@@unique([modelOutputId, evaluationId])
|
@@unique([modelResponseId, evaluationId])
|
||||||
}
|
}
|
||||||
|
|
||||||
model Organization {
|
model Organization {
|
||||||
@@ -176,8 +181,8 @@ model Organization {
|
|||||||
|
|
||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
updatedAt DateTime @updatedAt
|
updatedAt DateTime @updatedAt
|
||||||
OrganizationUser OrganizationUser[]
|
organizationUsers OrganizationUser[]
|
||||||
Experiment Experiment[]
|
experiments Experiment[]
|
||||||
}
|
}
|
||||||
|
|
||||||
enum OrganizationUserRole {
|
enum OrganizationUserRole {
|
||||||
@@ -203,6 +208,20 @@ model OrganizationUser {
|
|||||||
@@unique([organizationId, userId])
|
@@unique([organizationId, userId])
|
||||||
}
|
}
|
||||||
|
|
||||||
|
model WorldChampEntrant {
|
||||||
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
|
||||||
|
userId String @db.Uuid
|
||||||
|
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
approved Boolean @default(false)
|
||||||
|
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
|
||||||
|
@@unique([userId])
|
||||||
|
}
|
||||||
|
|
||||||
model Account {
|
model Account {
|
||||||
id String @id @default(uuid()) @db.Uuid
|
id String @id @default(uuid()) @db.Uuid
|
||||||
userId String @db.Uuid
|
userId String @db.Uuid
|
||||||
@@ -238,8 +257,12 @@ model User {
|
|||||||
image String?
|
image String?
|
||||||
accounts Account[]
|
accounts Account[]
|
||||||
sessions Session[]
|
sessions Session[]
|
||||||
OrganizationUser OrganizationUser[]
|
organizationUsers OrganizationUser[]
|
||||||
Organization Organization[]
|
organizations Organization[]
|
||||||
|
worldChampEntrant WorldChampEntrant?
|
||||||
|
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @default(now()) @updatedAt
|
||||||
}
|
}
|
||||||
|
|
||||||
model VerificationToken {
|
model VerificationToken {
|
||||||
|
|||||||
@@ -7,9 +7,13 @@ const defaultId = "11111111-1111-1111-1111-111111111111";
|
|||||||
await prisma.organization.deleteMany({
|
await prisma.organization.deleteMany({
|
||||||
where: { id: defaultId },
|
where: { id: defaultId },
|
||||||
});
|
});
|
||||||
await prisma.organization.create({
|
|
||||||
|
// If there's an existing org, just seed into it
|
||||||
|
const org =
|
||||||
|
(await prisma.organization.findFirst({})) ??
|
||||||
|
(await prisma.organization.create({
|
||||||
data: { id: defaultId },
|
data: { id: defaultId },
|
||||||
});
|
}));
|
||||||
|
|
||||||
await prisma.experiment.deleteMany({
|
await prisma.experiment.deleteMany({
|
||||||
where: {
|
where: {
|
||||||
@@ -21,7 +25,7 @@ await prisma.experiment.create({
|
|||||||
data: {
|
data: {
|
||||||
id: defaultId,
|
id: defaultId,
|
||||||
label: "Country Capitals Example",
|
label: "Country Capitals Example",
|
||||||
organizationId: defaultId,
|
organizationId: org.id,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -46,8 +50,10 @@ await prisma.promptVariant.createMany({
|
|||||||
label: "Prompt Variant 1",
|
label: "Prompt Variant 1",
|
||||||
sortIndex: 0,
|
sortIndex: 0,
|
||||||
model: "gpt-3.5-turbo-0613",
|
model: "gpt-3.5-turbo-0613",
|
||||||
|
modelProvider: "openai/ChatCompletion",
|
||||||
|
constructFnVersion: 1,
|
||||||
constructFn: dedent`
|
constructFn: dedent`
|
||||||
prompt = {
|
definePrompt("openai/ChatCompletion", {
|
||||||
model: "gpt-3.5-turbo-0613",
|
model: "gpt-3.5-turbo-0613",
|
||||||
messages: [
|
messages: [
|
||||||
{
|
{
|
||||||
@@ -56,15 +62,17 @@ await prisma.promptVariant.createMany({
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
temperature: 0,
|
temperature: 0,
|
||||||
}`,
|
})`,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
experimentId: defaultId,
|
experimentId: defaultId,
|
||||||
label: "Prompt Variant 2",
|
label: "Prompt Variant 2",
|
||||||
sortIndex: 1,
|
sortIndex: 1,
|
||||||
model: "gpt-3.5-turbo-0613",
|
model: "gpt-3.5-turbo-0613",
|
||||||
|
modelProvider: "openai/ChatCompletion",
|
||||||
|
constructFnVersion: 1,
|
||||||
constructFn: dedent`
|
constructFn: dedent`
|
||||||
prompt = {
|
definePrompt("openai/ChatCompletion", {
|
||||||
model: "gpt-3.5-turbo-0613",
|
model: "gpt-3.5-turbo-0613",
|
||||||
messages: [
|
messages: [
|
||||||
{
|
{
|
||||||
@@ -73,7 +81,7 @@ await prisma.promptVariant.createMany({
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
temperature: 0,
|
temperature: 0,
|
||||||
}`,
|
})`,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
});
|
});
|
||||||
@@ -99,30 +107,41 @@ await prisma.testScenario.deleteMany({
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const countries = [
|
||||||
|
"Afghanistan",
|
||||||
|
"Albania",
|
||||||
|
"Algeria",
|
||||||
|
"Andorra",
|
||||||
|
"Angola",
|
||||||
|
"Antigua and Barbuda",
|
||||||
|
"Argentina",
|
||||||
|
"Armenia",
|
||||||
|
"Australia",
|
||||||
|
"Austria",
|
||||||
|
"Austrian Empire",
|
||||||
|
"Azerbaijan",
|
||||||
|
"Baden",
|
||||||
|
"Bahamas, The",
|
||||||
|
"Bahrain",
|
||||||
|
"Bangladesh",
|
||||||
|
"Barbados",
|
||||||
|
"Bavaria",
|
||||||
|
"Belarus",
|
||||||
|
"Belgium",
|
||||||
|
"Belize",
|
||||||
|
"Benin (Dahomey)",
|
||||||
|
"Bolivia",
|
||||||
|
"Bosnia and Herzegovina",
|
||||||
|
"Botswana",
|
||||||
|
];
|
||||||
await prisma.testScenario.createMany({
|
await prisma.testScenario.createMany({
|
||||||
data: [
|
data: countries.map((country, i) => ({
|
||||||
{
|
|
||||||
experimentId: defaultId,
|
experimentId: defaultId,
|
||||||
sortIndex: 0,
|
sortIndex: i,
|
||||||
variableValues: {
|
variableValues: {
|
||||||
country: "Spain",
|
country: country,
|
||||||
},
|
},
|
||||||
},
|
})),
|
||||||
{
|
|
||||||
experimentId: defaultId,
|
|
||||||
sortIndex: 1,
|
|
||||||
variableValues: {
|
|
||||||
country: "USA",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
experimentId: defaultId,
|
|
||||||
sortIndex: 2,
|
|
||||||
variableValues: {
|
|
||||||
country: "Chile",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
});
|
||||||
|
|
||||||
const variants = await prisma.promptVariant.findMany({
|
const variants = await prisma.promptVariant.findMany({
|
||||||
@@ -145,5 +164,5 @@ await Promise.all(
|
|||||||
testScenarioId: scenario.id,
|
testScenarioId: scenario.id,
|
||||||
})),
|
})),
|
||||||
)
|
)
|
||||||
.map((cell) => generateNewCell(cell.promptVariantId, cell.testScenarioId)),
|
.map((cell) => generateNewCell(cell.promptVariantId, cell.testScenarioId, { stream: false })),
|
||||||
);
|
);
|
||||||
|
|||||||
127
prisma/seedAgiEval.ts
Normal file
127
prisma/seedAgiEval.ts
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
import { prisma } from "~/server/db";
|
||||||
|
import { generateNewCell } from "~/server/utils/generateNewCell";
|
||||||
|
import dedent from "dedent";
|
||||||
|
import { execSync } from "child_process";
|
||||||
|
import fs from "fs";
|
||||||
|
|
||||||
|
const defaultId = "11111111-1111-1111-1111-111111111112";
|
||||||
|
|
||||||
|
await prisma.organization.deleteMany({
|
||||||
|
where: { id: defaultId },
|
||||||
|
});
|
||||||
|
|
||||||
|
// If there's an existing org, just seed into it
|
||||||
|
const org =
|
||||||
|
(await prisma.organization.findFirst({})) ??
|
||||||
|
(await prisma.organization.create({
|
||||||
|
data: { id: defaultId },
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Clone the repo from git@github.com:microsoft/AGIEval.git into a tmp dir if it doesn't exist
|
||||||
|
const tmpDir = "/tmp/agi-eval";
|
||||||
|
if (!fs.existsSync(tmpDir)) {
|
||||||
|
execSync(`git clone git@github.com:microsoft/AGIEval.git ${tmpDir}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const datasets = [
|
||||||
|
"sat-en",
|
||||||
|
"sat-math",
|
||||||
|
"lsat-rc",
|
||||||
|
"lsat-ar",
|
||||||
|
"aqua-rat",
|
||||||
|
"logiqa-en",
|
||||||
|
"lsat-lr",
|
||||||
|
"math",
|
||||||
|
];
|
||||||
|
|
||||||
|
type Scenario = {
|
||||||
|
passage: string | null;
|
||||||
|
question: string;
|
||||||
|
options: string[] | null;
|
||||||
|
label: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
for (const dataset of datasets) {
|
||||||
|
const experimentName = `AGI-Eval: ${dataset}`;
|
||||||
|
const oldExperiment = await prisma.experiment.findFirst({
|
||||||
|
where: {
|
||||||
|
label: experimentName,
|
||||||
|
organizationId: org.id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
if (oldExperiment) {
|
||||||
|
await prisma.experiment.deleteMany({
|
||||||
|
where: { id: oldExperiment.id },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const experiment = await prisma.experiment.create({
|
||||||
|
data: {
|
||||||
|
id: oldExperiment?.id ?? undefined,
|
||||||
|
label: experimentName,
|
||||||
|
organizationId: org.id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const scenarios: Scenario[] = fs
|
||||||
|
.readFileSync(`${tmpDir}/data/v1/${dataset}.jsonl`, "utf8")
|
||||||
|
.split("\n")
|
||||||
|
.filter((line) => line.length > 0)
|
||||||
|
.map((line) => JSON.parse(line) as Scenario);
|
||||||
|
console.log("scenarios", scenarios.length);
|
||||||
|
|
||||||
|
await prisma.testScenario.createMany({
|
||||||
|
data: scenarios.slice(0, 30).map((scenario, i) => ({
|
||||||
|
experimentId: experiment.id,
|
||||||
|
sortIndex: i,
|
||||||
|
variableValues: {
|
||||||
|
passage: scenario.passage,
|
||||||
|
question: scenario.question,
|
||||||
|
options: scenario.options?.join("\n"),
|
||||||
|
label: scenario.label,
|
||||||
|
},
|
||||||
|
})),
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.templateVariable.createMany({
|
||||||
|
data: ["passage", "question", "options", "label"].map((label) => ({
|
||||||
|
experimentId: experiment.id,
|
||||||
|
label,
|
||||||
|
})),
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.promptVariant.createMany({
|
||||||
|
data: [
|
||||||
|
{
|
||||||
|
experimentId: experiment.id,
|
||||||
|
label: "Prompt Variant 1",
|
||||||
|
sortIndex: 0,
|
||||||
|
model: "gpt-3.5-turbo-0613",
|
||||||
|
modelProvider: "openai/ChatCompletion",
|
||||||
|
constructFnVersion: 1,
|
||||||
|
constructFn: dedent`
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-3.5-turbo-0613",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: \`Passage: ${"$"}{scenario.passage}\n\nQuestion: ${"$"}{scenario.question}\n\nOptions: ${"$"}{scenario.options}\n\n Respond with just the letter of the best option in the format Answer: (A).\`
|
||||||
|
}
|
||||||
|
],
|
||||||
|
temperature: 0,
|
||||||
|
})`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.evaluation.createMany({
|
||||||
|
data: [
|
||||||
|
{
|
||||||
|
experimentId: experiment.id,
|
||||||
|
label: "Eval",
|
||||||
|
evalType: "CONTAINS",
|
||||||
|
value: "Answer: ({{label}})",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
}
|
||||||
113
prisma/seedTwitterSentiment.ts
Normal file
113
prisma/seedTwitterSentiment.ts
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
import { prisma } from "~/server/db";
|
||||||
|
import dedent from "dedent";
|
||||||
|
import fs from "fs";
|
||||||
|
import { parse } from "csv-parse/sync";
|
||||||
|
|
||||||
|
const defaultId = "11111111-1111-1111-1111-111111111112";
|
||||||
|
|
||||||
|
await prisma.organization.deleteMany({
|
||||||
|
where: { id: defaultId },
|
||||||
|
});
|
||||||
|
|
||||||
|
// If there's an existing org, just seed into it
|
||||||
|
const org =
|
||||||
|
(await prisma.organization.findFirst({})) ??
|
||||||
|
(await prisma.organization.create({
|
||||||
|
data: { id: defaultId },
|
||||||
|
}));
|
||||||
|
|
||||||
|
type Scenario = {
|
||||||
|
text: string;
|
||||||
|
sentiment: string;
|
||||||
|
emotion: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
const experimentName = `Twitter Sentiment Analysis`;
|
||||||
|
const oldExperiment = await prisma.experiment.findFirst({
|
||||||
|
where: {
|
||||||
|
label: experimentName,
|
||||||
|
organizationId: org.id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
if (oldExperiment) {
|
||||||
|
await prisma.experiment.deleteMany({
|
||||||
|
where: { id: oldExperiment.id },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const experiment = await prisma.experiment.create({
|
||||||
|
data: {
|
||||||
|
id: oldExperiment?.id ?? undefined,
|
||||||
|
label: experimentName,
|
||||||
|
organizationId: org.id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const content = fs.readFileSync("./prisma/datasets/validated_tweets.csv", "utf8");
|
||||||
|
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
const records: any[] = parse(content, { delimiter: ",", from_line: 2 });
|
||||||
|
|
||||||
|
console.log("records", records);
|
||||||
|
|
||||||
|
const scenarios: Scenario[] = records.map((row) => ({
|
||||||
|
text: row[0],
|
||||||
|
sentiment: row[1],
|
||||||
|
emotion: row[2],
|
||||||
|
}));
|
||||||
|
|
||||||
|
console.log("scenarios", scenarios.length);
|
||||||
|
|
||||||
|
await prisma.testScenario.createMany({
|
||||||
|
data: scenarios.slice(0, 30).map((scenario, i) => ({
|
||||||
|
experimentId: experiment.id,
|
||||||
|
sortIndex: i,
|
||||||
|
variableValues: {
|
||||||
|
text: scenario.text,
|
||||||
|
sentiment: scenario.sentiment,
|
||||||
|
emotion: scenario.emotion,
|
||||||
|
},
|
||||||
|
})),
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.templateVariable.createMany({
|
||||||
|
data: ["text", "sentiment", "emotion"].map((label) => ({
|
||||||
|
experimentId: experiment.id,
|
||||||
|
label,
|
||||||
|
})),
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.promptVariant.createMany({
|
||||||
|
data: [
|
||||||
|
{
|
||||||
|
experimentId: experiment.id,
|
||||||
|
label: "Prompt Variant 1",
|
||||||
|
sortIndex: 0,
|
||||||
|
model: "gpt-3.5-turbo-0613",
|
||||||
|
modelProvider: "openai/ChatCompletion",
|
||||||
|
constructFnVersion: 1,
|
||||||
|
constructFn: dedent`
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-3.5-turbo-0613",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: \`Text: ${"$"}{scenario.text}\n\nRespond with the sentiment (negative|neutral|positive) and emotion (optimism|joy|anger|sadness) of the tweet in this format: "answer: <sentiment>-<emotion>".\`
|
||||||
|
}
|
||||||
|
],
|
||||||
|
temperature: 0,
|
||||||
|
})`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.evaluation.createMany({
|
||||||
|
data: [
|
||||||
|
{
|
||||||
|
experimentId: experiment.id,
|
||||||
|
label: "Eval",
|
||||||
|
evalType: "CONTAINS",
|
||||||
|
value: "answer: {{sentiment}}-{{emotion}}",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
BIN
public/fonts/Inconsolata_SemiExpanded-Medium.ttf
Normal file
BIN
public/fonts/Inconsolata_SemiExpanded-Medium.ttf
Normal file
Binary file not shown.
BIN
public/og.png
Normal file
BIN
public/og.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 62 KiB |
@@ -6,4 +6,7 @@ echo "Migrating the database"
|
|||||||
pnpm prisma migrate deploy
|
pnpm prisma migrate deploy
|
||||||
|
|
||||||
echo "Starting the server"
|
echo "Starting the server"
|
||||||
pnpm start
|
|
||||||
|
pnpm concurrently --kill-others \
|
||||||
|
"pnpm start" \
|
||||||
|
"pnpm tsx src/server/tasks/worker.ts"
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
|
||||||
|
|
||||||
import YAML from "yaml";
|
|
||||||
import fs from "fs";
|
|
||||||
import path from "path";
|
|
||||||
import { openapiSchemaToJsonSchema } from "@openapi-contrib/openapi-schema-to-json-schema";
|
|
||||||
import assert from "assert";
|
|
||||||
import { type AcceptibleInputSchema } from "@openapi-contrib/openapi-schema-to-json-schema/dist/mjs/openapi-schema-types";
|
|
||||||
|
|
||||||
const OPENAPI_URL =
|
|
||||||
"https://raw.githubusercontent.com/openai/openai-openapi/0c432eb66fd0c758fd8b9bd69db41c1096e5f4db/openapi.yaml";
|
|
||||||
|
|
||||||
const convertOpenApiToJsonSchema = async (url: string) => {
|
|
||||||
// Fetch the openapi document
|
|
||||||
const response = await fetch(url);
|
|
||||||
const openApiYaml = await response.text();
|
|
||||||
|
|
||||||
// Parse the yaml document
|
|
||||||
const openApiDocument = YAML.parse(openApiYaml) as AcceptibleInputSchema;
|
|
||||||
|
|
||||||
// Convert the openapi schema to json schema
|
|
||||||
const jsonSchema = openapiSchemaToJsonSchema(openApiDocument);
|
|
||||||
|
|
||||||
const modelProperty = jsonSchema.components.schemas.CreateChatCompletionRequest.properties.model;
|
|
||||||
|
|
||||||
assert(modelProperty.oneOf.length === 2, "Expected model to have oneOf length of 2");
|
|
||||||
|
|
||||||
// We need to do a bit of surgery here since the Monaco editor doesn't like
|
|
||||||
// the fact that the schema says `model` can be either a string or an enum,
|
|
||||||
// and displays a warning in the editor. Let's stick with just an enum for
|
|
||||||
// now and drop the string option.
|
|
||||||
modelProperty.type = "string";
|
|
||||||
modelProperty.enum = modelProperty.oneOf[1].enum;
|
|
||||||
modelProperty.oneOf = undefined;
|
|
||||||
|
|
||||||
// Get the directory of the current script
|
|
||||||
const currentDirectory = path.dirname(import.meta.url).replace("file://", "");
|
|
||||||
|
|
||||||
// Write the JSON schema to a file in the current directory
|
|
||||||
fs.writeFileSync(
|
|
||||||
path.join(currentDirectory, "openai.schema.json"),
|
|
||||||
JSON.stringify(jsonSchema, null, 2),
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
convertOpenApiToJsonSchema(OPENAPI_URL)
|
|
||||||
.then(() => console.log("JSON schema has been written successfully."))
|
|
||||||
.catch((err) => console.error(err));
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
import fs from "fs";
|
|
||||||
import path from "path";
|
|
||||||
import openapiTS, { type OpenAPI3 } from "openapi-typescript";
|
|
||||||
import YAML from "yaml";
|
|
||||||
import { pick } from "lodash-es";
|
|
||||||
import assert from "assert";
|
|
||||||
|
|
||||||
const OPENAPI_URL =
|
|
||||||
"https://raw.githubusercontent.com/openai/openai-openapi/0c432eb66fd0c758fd8b9bd69db41c1096e5f4db/openapi.yaml";
|
|
||||||
|
|
||||||
// Generate TypeScript types from OpenAPI
|
|
||||||
|
|
||||||
const schema = await fetch(OPENAPI_URL)
|
|
||||||
.then((res) => res.text())
|
|
||||||
.then((txt) => YAML.parse(txt) as OpenAPI3);
|
|
||||||
|
|
||||||
console.log(schema.components?.schemas?.CreateChatCompletionRequest);
|
|
||||||
|
|
||||||
// @ts-expect-error just assume this works, the assert will catch it if it doesn't
|
|
||||||
const modelProperty = schema.components?.schemas?.CreateChatCompletionRequest?.properties?.model;
|
|
||||||
|
|
||||||
assert(modelProperty.oneOf.length === 2, "Expected model to have oneOf length of 2");
|
|
||||||
|
|
||||||
// We need to do a bit of surgery here since the Monaco editor doesn't like
|
|
||||||
// the fact that the schema says `model` can be either a string or an enum,
|
|
||||||
// and displays a warning in the editor. Let's stick with just an enum for
|
|
||||||
// now and drop the string option.
|
|
||||||
modelProperty.type = "string";
|
|
||||||
modelProperty.enum = modelProperty.oneOf[1].enum;
|
|
||||||
modelProperty.oneOf = undefined;
|
|
||||||
|
|
||||||
delete schema["paths"];
|
|
||||||
assert(schema.components?.schemas);
|
|
||||||
schema.components.schemas = pick(schema.components?.schemas, [
|
|
||||||
"CreateChatCompletionRequest",
|
|
||||||
"ChatCompletionRequestMessage",
|
|
||||||
"ChatCompletionFunctions",
|
|
||||||
"ChatCompletionFunctionParameters",
|
|
||||||
]);
|
|
||||||
console.log(schema);
|
|
||||||
|
|
||||||
let openApiTypes = await openapiTS(schema);
|
|
||||||
|
|
||||||
// Remove the `export` from any line that starts with `export`
|
|
||||||
openApiTypes = openApiTypes.replaceAll("\nexport ", "\n");
|
|
||||||
|
|
||||||
// Get the directory of the current script
|
|
||||||
const currentDirectory = path.dirname(import.meta.url).replace("file://", "");
|
|
||||||
|
|
||||||
// Write the TypeScript types. We only want to use this in our in-app editor, so
|
|
||||||
// save as a .txt so VS Code doesn't try to auto-import definitions from it.
|
|
||||||
fs.writeFileSync(path.join(currentDirectory, "openai.types.ts.txt"), openApiTypes);
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,148 +0,0 @@
|
|||||||
/**
|
|
||||||
* This file was auto-generated by openapi-typescript.
|
|
||||||
* Do not make direct changes to the file.
|
|
||||||
*/
|
|
||||||
|
|
||||||
|
|
||||||
/** OneOf type helpers */
|
|
||||||
type Without<T, U> = { [P in Exclude<keyof T, keyof U>]?: never };
|
|
||||||
type XOR<T, U> = (T | U) extends object ? (Without<T, U> & U) | (Without<U, T> & T) : T | U;
|
|
||||||
type OneOf<T extends any[]> = T extends [infer Only] ? Only : T extends [infer A, infer B, ...infer Rest] ? OneOf<[XOR<A, B>, ...Rest]> : never;
|
|
||||||
|
|
||||||
type paths = Record<string, never>;
|
|
||||||
|
|
||||||
type webhooks = Record<string, never>;
|
|
||||||
|
|
||||||
interface components {
|
|
||||||
schemas: {
|
|
||||||
CreateChatCompletionRequest: {
|
|
||||||
/**
|
|
||||||
* @description ID of the model to use. See the [model endpoint compatibility](/docs/models/model-endpoint-compatibility) table for details on which models work with the Chat API.
|
|
||||||
* @example gpt-3.5-turbo
|
|
||||||
* @enum {string}
|
|
||||||
*/
|
|
||||||
model: "gpt-4" | "gpt-4-0613" | "gpt-4-32k" | "gpt-4-32k-0613" | "gpt-3.5-turbo" | "gpt-3.5-turbo-16k" | "gpt-3.5-turbo-0613" | "gpt-3.5-turbo-16k-0613";
|
|
||||||
/** @description A list of messages comprising the conversation so far. [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb). */
|
|
||||||
messages: (components["schemas"]["ChatCompletionRequestMessage"])[];
|
|
||||||
/** @description A list of functions the model may generate JSON inputs for. */
|
|
||||||
functions?: (components["schemas"]["ChatCompletionFunctions"])[];
|
|
||||||
/** @description Controls how the model responds to function calls. "none" means the model does not call a function, and responds to the end-user. "auto" means the model can pick between an end-user or calling a function. Specifying a particular function via `{"name":\ "my_function"}` forces the model to call that function. "none" is the default when no functions are present. "auto" is the default if functions are present. */
|
|
||||||
function_call?: OneOf<["none" | "auto", {
|
|
||||||
/** @description The name of the function to call. */
|
|
||||||
name: string;
|
|
||||||
}]>;
|
|
||||||
/**
|
|
||||||
* @description What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.
|
|
||||||
*
|
|
||||||
* We generally recommend altering this or `top_p` but not both.
|
|
||||||
*
|
|
||||||
* @default 1
|
|
||||||
* @example 1
|
|
||||||
*/
|
|
||||||
temperature?: number | null;
|
|
||||||
/**
|
|
||||||
* @description An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
|
|
||||||
*
|
|
||||||
* We generally recommend altering this or `temperature` but not both.
|
|
||||||
*
|
|
||||||
* @default 1
|
|
||||||
* @example 1
|
|
||||||
*/
|
|
||||||
top_p?: number | null;
|
|
||||||
/**
|
|
||||||
* @description How many chat completion choices to generate for each input message.
|
|
||||||
* @default 1
|
|
||||||
* @example 1
|
|
||||||
*/
|
|
||||||
n?: number | null;
|
|
||||||
/**
|
|
||||||
* @description If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) as they become available, with the stream terminated by a `data: [DONE]` message. [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_stream_completions.ipynb).
|
|
||||||
*
|
|
||||||
* @default false
|
|
||||||
*/
|
|
||||||
stream?: boolean | null;
|
|
||||||
/**
|
|
||||||
* @description Up to 4 sequences where the API will stop generating further tokens.
|
|
||||||
*
|
|
||||||
* @default null
|
|
||||||
*/
|
|
||||||
stop?: (string | null) | (string)[];
|
|
||||||
/**
|
|
||||||
* @description The maximum number of [tokens](/tokenizer) to generate in the chat completion.
|
|
||||||
*
|
|
||||||
* The total length of input tokens and generated tokens is limited by the model's context length. [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.
|
|
||||||
*
|
|
||||||
* @default inf
|
|
||||||
*/
|
|
||||||
max_tokens?: number;
|
|
||||||
/**
|
|
||||||
* @description Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.
|
|
||||||
*
|
|
||||||
* [See more information about frequency and presence penalties.](/docs/api-reference/parameter-details)
|
|
||||||
*
|
|
||||||
* @default 0
|
|
||||||
*/
|
|
||||||
presence_penalty?: number | null;
|
|
||||||
/**
|
|
||||||
* @description Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
|
|
||||||
*
|
|
||||||
* [See more information about frequency and presence penalties.](/docs/api-reference/parameter-details)
|
|
||||||
*
|
|
||||||
* @default 0
|
|
||||||
*/
|
|
||||||
frequency_penalty?: number | null;
|
|
||||||
/**
|
|
||||||
* @description Modify the likelihood of specified tokens appearing in the completion.
|
|
||||||
*
|
|
||||||
* Accepts a json object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token.
|
|
||||||
*
|
|
||||||
* @default null
|
|
||||||
*/
|
|
||||||
logit_bias?: Record<string, unknown> | null;
|
|
||||||
/**
|
|
||||||
* @description A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. [Learn more](/docs/guides/safety-best-practices/end-user-ids).
|
|
||||||
*
|
|
||||||
* @example user-1234
|
|
||||||
*/
|
|
||||||
user?: string;
|
|
||||||
};
|
|
||||||
ChatCompletionRequestMessage: {
|
|
||||||
/**
|
|
||||||
* @description The role of the messages author. One of `system`, `user`, `assistant`, or `function`.
|
|
||||||
* @enum {string}
|
|
||||||
*/
|
|
||||||
role: "system" | "user" | "assistant" | "function";
|
|
||||||
/** @description The contents of the message. `content` is required for all messages except assistant messages with function calls. */
|
|
||||||
content?: string;
|
|
||||||
/** @description The name of the author of this message. `name` is required if role is `function`, and it should be the name of the function whose response is in the `content`. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters. */
|
|
||||||
name?: string;
|
|
||||||
/** @description The name and arguments of a function that should be called, as generated by the model. */
|
|
||||||
function_call?: {
|
|
||||||
/** @description The name of the function to call. */
|
|
||||||
name?: string;
|
|
||||||
/** @description The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function. */
|
|
||||||
arguments?: string;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
ChatCompletionFunctions: {
|
|
||||||
/** @description The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64. */
|
|
||||||
name: string;
|
|
||||||
/** @description The description of what the function does. */
|
|
||||||
description?: string;
|
|
||||||
parameters?: components["schemas"]["ChatCompletionFunctionParameters"];
|
|
||||||
};
|
|
||||||
/** @description The parameters the functions accepts, described as a JSON Schema object. See the [guide](/docs/guides/gpt/function-calling) for examples, and the [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for documentation about the format. */
|
|
||||||
ChatCompletionFunctionParameters: {
|
|
||||||
[key: string]: unknown;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
responses: never;
|
|
||||||
parameters: never;
|
|
||||||
requestBodies: never;
|
|
||||||
headers: never;
|
|
||||||
pathItems: never;
|
|
||||||
}
|
|
||||||
|
|
||||||
type external = Record<string, never>;
|
|
||||||
|
|
||||||
type operations = Record<string, never>;
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
{
|
|
||||||
"compilerOptions": {
|
|
||||||
"target": "esnext",
|
|
||||||
"moduleResolution": "nodenext"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,19 +1,22 @@
|
|||||||
import { Textarea, type TextareaProps } from "@chakra-ui/react";
|
import { Textarea, type TextareaProps } from "@chakra-ui/react";
|
||||||
import ResizeTextarea from "react-textarea-autosize";
|
import ResizeTextarea from "react-textarea-autosize";
|
||||||
import React from "react";
|
import React, { useLayoutEffect, useState } from "react";
|
||||||
|
|
||||||
export const AutoResizeTextarea: React.ForwardRefRenderFunction<
|
export const AutoResizeTextarea: React.ForwardRefRenderFunction<
|
||||||
HTMLTextAreaElement,
|
HTMLTextAreaElement,
|
||||||
TextareaProps & { minRows?: number }
|
TextareaProps & { minRows?: number }
|
||||||
> = (props, ref) => {
|
> = ({ minRows = 1, overflowY = "hidden", ...props }, ref) => {
|
||||||
|
const [isRerendered, setIsRerendered] = useState(false);
|
||||||
|
useLayoutEffect(() => setIsRerendered(true), []);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Textarea
|
<Textarea
|
||||||
minH="unset"
|
minH="unset"
|
||||||
overflow="hidden"
|
minRows={minRows}
|
||||||
|
overflowY={isRerendered ? overflowY : "hidden"}
|
||||||
w="100%"
|
w="100%"
|
||||||
resize="none"
|
resize="none"
|
||||||
ref={ref}
|
ref={ref}
|
||||||
minRows={1}
|
|
||||||
transition="height none"
|
transition="height none"
|
||||||
as={ResizeTextarea}
|
as={ResizeTextarea}
|
||||||
{...props}
|
{...props}
|
||||||
|
|||||||
142
src/components/ChangeModelModal/ChangeModelModal.tsx
Normal file
142
src/components/ChangeModelModal/ChangeModelModal.tsx
Normal file
@@ -0,0 +1,142 @@
|
|||||||
|
import {
|
||||||
|
Button,
|
||||||
|
HStack,
|
||||||
|
Icon,
|
||||||
|
Modal,
|
||||||
|
ModalBody,
|
||||||
|
ModalCloseButton,
|
||||||
|
ModalContent,
|
||||||
|
ModalFooter,
|
||||||
|
ModalHeader,
|
||||||
|
ModalOverlay,
|
||||||
|
Spinner,
|
||||||
|
Text,
|
||||||
|
VStack,
|
||||||
|
} from "@chakra-ui/react";
|
||||||
|
import { type PromptVariant } from "@prisma/client";
|
||||||
|
import { isObject, isString } from "lodash-es";
|
||||||
|
import { useState } from "react";
|
||||||
|
import { RiExchangeFundsFill } from "react-icons/ri";
|
||||||
|
import { type ProviderModel } from "~/modelProviders/types";
|
||||||
|
import { api } from "~/utils/api";
|
||||||
|
import { useExperiment, useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
|
||||||
|
import { lookupModel, modelLabel } from "~/utils/utils";
|
||||||
|
import CompareFunctions from "../RefinePromptModal/CompareFunctions";
|
||||||
|
import { ModelSearch } from "./ModelSearch";
|
||||||
|
import { ModelStatsCard } from "./ModelStatsCard";
|
||||||
|
|
||||||
|
export const ChangeModelModal = ({
|
||||||
|
variant,
|
||||||
|
onClose,
|
||||||
|
}: {
|
||||||
|
variant: PromptVariant;
|
||||||
|
onClose: () => void;
|
||||||
|
}) => {
|
||||||
|
const originalModel = lookupModel(variant.modelProvider, variant.model);
|
||||||
|
const [selectedModel, setSelectedModel] = useState({
|
||||||
|
provider: variant.modelProvider,
|
||||||
|
model: variant.model,
|
||||||
|
} as ProviderModel);
|
||||||
|
const [convertedModel, setConvertedModel] = useState<ProviderModel | undefined>();
|
||||||
|
const visibleScenarios = useVisibleScenarioIds();
|
||||||
|
|
||||||
|
const utils = api.useContext();
|
||||||
|
|
||||||
|
const experiment = useExperiment();
|
||||||
|
|
||||||
|
const { mutateAsync: getModifiedPromptMutateAsync, data: modifiedPromptFn } =
|
||||||
|
api.promptVariants.getModifiedPromptFn.useMutation();
|
||||||
|
|
||||||
|
const [getModifiedPromptFn, modificationInProgress] = useHandledAsyncCallback(async () => {
|
||||||
|
if (!experiment) return;
|
||||||
|
|
||||||
|
await getModifiedPromptMutateAsync({
|
||||||
|
id: variant.id,
|
||||||
|
newModel: selectedModel,
|
||||||
|
});
|
||||||
|
setConvertedModel(selectedModel);
|
||||||
|
}, [getModifiedPromptMutateAsync, onClose, experiment, variant, selectedModel]);
|
||||||
|
|
||||||
|
const replaceVariantMutation = api.promptVariants.replaceVariant.useMutation();
|
||||||
|
|
||||||
|
const [replaceVariant, replacementInProgress] = useHandledAsyncCallback(async () => {
|
||||||
|
if (
|
||||||
|
!variant.experimentId ||
|
||||||
|
!modifiedPromptFn ||
|
||||||
|
(isObject(modifiedPromptFn) && "status" in modifiedPromptFn)
|
||||||
|
)
|
||||||
|
return;
|
||||||
|
await replaceVariantMutation.mutateAsync({
|
||||||
|
id: variant.id,
|
||||||
|
constructFn: modifiedPromptFn,
|
||||||
|
streamScenarios: visibleScenarios,
|
||||||
|
});
|
||||||
|
await utils.promptVariants.list.invalidate();
|
||||||
|
onClose();
|
||||||
|
}, [replaceVariantMutation, variant, onClose, modifiedPromptFn]);
|
||||||
|
|
||||||
|
const originalLabel = modelLabel(variant.modelProvider, variant.model);
|
||||||
|
const selectedLabel = modelLabel(selectedModel.provider, selectedModel.model);
|
||||||
|
const convertedLabel =
|
||||||
|
convertedModel && modelLabel(convertedModel.provider, convertedModel.model);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Modal
|
||||||
|
isOpen
|
||||||
|
onClose={onClose}
|
||||||
|
size={{ base: "xl", sm: "2xl", md: "3xl", lg: "5xl", xl: "7xl" }}
|
||||||
|
>
|
||||||
|
<ModalOverlay />
|
||||||
|
<ModalContent w={1200}>
|
||||||
|
<ModalHeader>
|
||||||
|
<HStack>
|
||||||
|
<Icon as={RiExchangeFundsFill} />
|
||||||
|
<Text>Change Model</Text>
|
||||||
|
</HStack>
|
||||||
|
</ModalHeader>
|
||||||
|
<ModalCloseButton />
|
||||||
|
<ModalBody maxW="unset">
|
||||||
|
<VStack spacing={8}>
|
||||||
|
<ModelStatsCard label="Original Model" model={originalModel} />
|
||||||
|
{originalLabel !== selectedLabel && (
|
||||||
|
<ModelStatsCard
|
||||||
|
label="New Model"
|
||||||
|
model={lookupModel(selectedModel.provider, selectedModel.model)}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
<ModelSearch selectedModel={selectedModel} setSelectedModel={setSelectedModel} />
|
||||||
|
{isString(modifiedPromptFn) && (
|
||||||
|
<CompareFunctions
|
||||||
|
originalFunction={variant.constructFn}
|
||||||
|
newFunction={modifiedPromptFn}
|
||||||
|
leftTitle={originalLabel}
|
||||||
|
rightTitle={convertedLabel}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</VStack>
|
||||||
|
</ModalBody>
|
||||||
|
|
||||||
|
<ModalFooter>
|
||||||
|
<HStack>
|
||||||
|
<Button
|
||||||
|
colorScheme="gray"
|
||||||
|
onClick={getModifiedPromptFn}
|
||||||
|
minW={24}
|
||||||
|
isDisabled={originalLabel === selectedLabel || modificationInProgress}
|
||||||
|
>
|
||||||
|
{modificationInProgress ? <Spinner boxSize={4} /> : <Text>Convert</Text>}
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
colorScheme="blue"
|
||||||
|
onClick={replaceVariant}
|
||||||
|
minW={24}
|
||||||
|
isDisabled={!convertedModel || modificationInProgress || replacementInProgress}
|
||||||
|
>
|
||||||
|
{replacementInProgress ? <Spinner boxSize={4} /> : <Text>Accept</Text>}
|
||||||
|
</Button>
|
||||||
|
</HStack>
|
||||||
|
</ModalFooter>
|
||||||
|
</ModalContent>
|
||||||
|
</Modal>
|
||||||
|
);
|
||||||
|
};
|
||||||
36
src/components/ChangeModelModal/ModelSearch.tsx
Normal file
36
src/components/ChangeModelModal/ModelSearch.tsx
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
import { Text, VStack } from "@chakra-ui/react";
|
||||||
|
import { type LegacyRef } from "react";
|
||||||
|
import Select from "react-select";
|
||||||
|
import { useElementDimensions } from "~/utils/hooks";
|
||||||
|
|
||||||
|
import { flatMap } from "lodash-es";
|
||||||
|
import frontendModelProviders from "~/modelProviders/frontendModelProviders";
|
||||||
|
import { type ProviderModel } from "~/modelProviders/types";
|
||||||
|
import { modelLabel } from "~/utils/utils";
|
||||||
|
|
||||||
|
const modelOptions = flatMap(Object.entries(frontendModelProviders), ([providerId, provider]) =>
|
||||||
|
Object.entries(provider.models).map(([modelId]) => ({
|
||||||
|
provider: providerId,
|
||||||
|
model: modelId,
|
||||||
|
})),
|
||||||
|
) as ProviderModel[];
|
||||||
|
|
||||||
|
export const ModelSearch = (props: {
|
||||||
|
selectedModel: ProviderModel;
|
||||||
|
setSelectedModel: (model: ProviderModel) => void;
|
||||||
|
}) => {
|
||||||
|
const [containerRef, containerDimensions] = useElementDimensions();
|
||||||
|
|
||||||
|
return (
|
||||||
|
<VStack ref={containerRef as LegacyRef<HTMLDivElement>} w="full">
|
||||||
|
<Text>Browse Models</Text>
|
||||||
|
<Select<ProviderModel>
|
||||||
|
styles={{ control: (provided) => ({ ...provided, width: containerDimensions?.width }) }}
|
||||||
|
getOptionLabel={(data) => modelLabel(data.provider, data.model)}
|
||||||
|
getOptionValue={(data) => modelLabel(data.provider, data.model)}
|
||||||
|
options={modelOptions}
|
||||||
|
onChange={(option) => option && props.setSelectedModel(option)}
|
||||||
|
/>
|
||||||
|
</VStack>
|
||||||
|
);
|
||||||
|
};
|
||||||
109
src/components/ChangeModelModal/ModelStatsCard.tsx
Normal file
109
src/components/ChangeModelModal/ModelStatsCard.tsx
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
import {
|
||||||
|
GridItem,
|
||||||
|
HStack,
|
||||||
|
Link,
|
||||||
|
SimpleGrid,
|
||||||
|
Text,
|
||||||
|
VStack,
|
||||||
|
type StackProps,
|
||||||
|
} from "@chakra-ui/react";
|
||||||
|
import { type lookupModel } from "~/utils/utils";
|
||||||
|
|
||||||
|
export const ModelStatsCard = ({
|
||||||
|
label,
|
||||||
|
model,
|
||||||
|
}: {
|
||||||
|
label: string;
|
||||||
|
model: ReturnType<typeof lookupModel>;
|
||||||
|
}) => {
|
||||||
|
if (!model) return null;
|
||||||
|
return (
|
||||||
|
<VStack w="full" align="start">
|
||||||
|
<Text fontWeight="bold" fontSize="sm" textTransform="uppercase">
|
||||||
|
{label}
|
||||||
|
</Text>
|
||||||
|
|
||||||
|
<VStack w="full" spacing={6} bgColor="gray.100" p={4} borderRadius={4}>
|
||||||
|
<HStack w="full" align="flex-start">
|
||||||
|
<Text flex={1} fontSize="lg">
|
||||||
|
<Text as="span" color="gray.600">
|
||||||
|
{model.provider} /{" "}
|
||||||
|
</Text>
|
||||||
|
<Text as="span" fontWeight="bold" color="gray.900">
|
||||||
|
{model.name}
|
||||||
|
</Text>
|
||||||
|
</Text>
|
||||||
|
<Link
|
||||||
|
href={model.learnMoreUrl}
|
||||||
|
isExternal
|
||||||
|
color="blue.500"
|
||||||
|
fontWeight="bold"
|
||||||
|
fontSize="sm"
|
||||||
|
ml={2}
|
||||||
|
>
|
||||||
|
Learn More
|
||||||
|
</Link>
|
||||||
|
</HStack>
|
||||||
|
<SimpleGrid
|
||||||
|
w="full"
|
||||||
|
justifyContent="space-between"
|
||||||
|
alignItems="flex-start"
|
||||||
|
fontSize="sm"
|
||||||
|
columns={{ base: 2, md: 4 }}
|
||||||
|
>
|
||||||
|
<SelectedModelLabeledInfo label="Context Window" info={model.contextWindow} />
|
||||||
|
{model.promptTokenPrice && (
|
||||||
|
<SelectedModelLabeledInfo
|
||||||
|
label="Input"
|
||||||
|
info={
|
||||||
|
<Text>
|
||||||
|
${(model.promptTokenPrice * 1000).toFixed(3)}
|
||||||
|
<Text color="gray.500"> / 1K tokens</Text>
|
||||||
|
</Text>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
{model.completionTokenPrice && (
|
||||||
|
<SelectedModelLabeledInfo
|
||||||
|
label="Output"
|
||||||
|
info={
|
||||||
|
<Text>
|
||||||
|
${(model.completionTokenPrice * 1000).toFixed(3)}
|
||||||
|
<Text color="gray.500"> / 1K tokens</Text>
|
||||||
|
</Text>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
{model.pricePerSecond && (
|
||||||
|
<SelectedModelLabeledInfo
|
||||||
|
label="Price"
|
||||||
|
info={
|
||||||
|
<Text>
|
||||||
|
${model.pricePerSecond.toFixed(3)}
|
||||||
|
<Text color="gray.500"> / second</Text>
|
||||||
|
</Text>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
<SelectedModelLabeledInfo label="Speed" info={<Text>{model.speed}</Text>} />
|
||||||
|
</SimpleGrid>
|
||||||
|
</VStack>
|
||||||
|
</VStack>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
const SelectedModelLabeledInfo = ({
|
||||||
|
label,
|
||||||
|
info,
|
||||||
|
...props
|
||||||
|
}: {
|
||||||
|
label: string;
|
||||||
|
info: string | number | React.ReactElement;
|
||||||
|
} & StackProps) => (
|
||||||
|
<GridItem>
|
||||||
|
<VStack alignItems="flex-start" {...props}>
|
||||||
|
<Text fontWeight="bold">{label}</Text>
|
||||||
|
<Text>{info}</Text>
|
||||||
|
</VStack>
|
||||||
|
</GridItem>
|
||||||
|
);
|
||||||
69
src/components/ExperimentSettingsDrawer/DeleteButton.tsx
Normal file
69
src/components/ExperimentSettingsDrawer/DeleteButton.tsx
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
import {
|
||||||
|
Button,
|
||||||
|
Icon,
|
||||||
|
AlertDialog,
|
||||||
|
AlertDialogBody,
|
||||||
|
AlertDialogFooter,
|
||||||
|
AlertDialogHeader,
|
||||||
|
AlertDialogContent,
|
||||||
|
AlertDialogOverlay,
|
||||||
|
useDisclosure,
|
||||||
|
Text,
|
||||||
|
} from "@chakra-ui/react";
|
||||||
|
|
||||||
|
import { useRouter } from "next/router";
|
||||||
|
import { useRef } from "react";
|
||||||
|
import { BsTrash } from "react-icons/bs";
|
||||||
|
import { api } from "~/utils/api";
|
||||||
|
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
|
||||||
|
|
||||||
|
export const DeleteButton = () => {
|
||||||
|
const experiment = useExperiment();
|
||||||
|
const mutation = api.experiments.delete.useMutation();
|
||||||
|
const utils = api.useContext();
|
||||||
|
const router = useRouter();
|
||||||
|
|
||||||
|
const { isOpen, onOpen, onClose } = useDisclosure();
|
||||||
|
const cancelRef = useRef<HTMLButtonElement>(null);
|
||||||
|
|
||||||
|
const [onDeleteConfirm] = useHandledAsyncCallback(async () => {
|
||||||
|
if (!experiment.data?.id) return;
|
||||||
|
await mutation.mutateAsync({ id: experiment.data.id });
|
||||||
|
await utils.experiments.list.invalidate();
|
||||||
|
await router.push({ pathname: "/experiments" });
|
||||||
|
onClose();
|
||||||
|
}, [mutation, experiment.data?.id, router]);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<Button size="sm" variant="ghost" colorScheme="red" fontWeight="normal" onClick={onOpen}>
|
||||||
|
<Icon as={BsTrash} boxSize={4} />
|
||||||
|
<Text ml={2}>Delete Experiment</Text>
|
||||||
|
</Button>
|
||||||
|
|
||||||
|
<AlertDialog isOpen={isOpen} leastDestructiveRef={cancelRef} onClose={onClose}>
|
||||||
|
<AlertDialogOverlay>
|
||||||
|
<AlertDialogContent>
|
||||||
|
<AlertDialogHeader fontSize="lg" fontWeight="bold">
|
||||||
|
Delete Experiment
|
||||||
|
</AlertDialogHeader>
|
||||||
|
|
||||||
|
<AlertDialogBody>
|
||||||
|
If you delete this experiment all the associated prompts and scenarios will be deleted
|
||||||
|
as well. Are you sure?
|
||||||
|
</AlertDialogBody>
|
||||||
|
|
||||||
|
<AlertDialogFooter>
|
||||||
|
<Button ref={cancelRef} onClick={onClose}>
|
||||||
|
Cancel
|
||||||
|
</Button>
|
||||||
|
<Button colorScheme="red" onClick={onDeleteConfirm} ml={3}>
|
||||||
|
Delete
|
||||||
|
</Button>
|
||||||
|
</AlertDialogFooter>
|
||||||
|
</AlertDialogContent>
|
||||||
|
</AlertDialogOverlay>
|
||||||
|
</AlertDialog>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
};
|
||||||
@@ -6,13 +6,14 @@ import {
|
|||||||
DrawerHeader,
|
DrawerHeader,
|
||||||
DrawerOverlay,
|
DrawerOverlay,
|
||||||
Heading,
|
Heading,
|
||||||
Stack,
|
VStack,
|
||||||
} from "@chakra-ui/react";
|
} from "@chakra-ui/react";
|
||||||
import EditScenarioVars from "./EditScenarioVars";
|
import EditScenarioVars from "../OutputsTable/EditScenarioVars";
|
||||||
import EditEvaluations from "./EditEvaluations";
|
import EditEvaluations from "../OutputsTable/EditEvaluations";
|
||||||
import { useAppStore } from "~/state/store";
|
import { useAppStore } from "~/state/store";
|
||||||
|
import { DeleteButton } from "./DeleteButton";
|
||||||
|
|
||||||
export default function SettingsDrawer() {
|
export default function ExperimentSettingsDrawer() {
|
||||||
const isOpen = useAppStore((state) => state.drawerOpen);
|
const isOpen = useAppStore((state) => state.drawerOpen);
|
||||||
const closeDrawer = useAppStore((state) => state.closeDrawer);
|
const closeDrawer = useAppStore((state) => state.closeDrawer);
|
||||||
|
|
||||||
@@ -22,13 +23,16 @@ export default function SettingsDrawer() {
|
|||||||
<DrawerContent>
|
<DrawerContent>
|
||||||
<DrawerCloseButton />
|
<DrawerCloseButton />
|
||||||
<DrawerHeader>
|
<DrawerHeader>
|
||||||
<Heading size="md">Settings</Heading>
|
<Heading size="md">Experiment Settings</Heading>
|
||||||
</DrawerHeader>
|
</DrawerHeader>
|
||||||
<DrawerBody>
|
<DrawerBody h="full" pb={4}>
|
||||||
<Stack spacing={6}>
|
<VStack h="full" justifyContent="space-between">
|
||||||
|
<VStack spacing={6}>
|
||||||
<EditScenarioVars />
|
<EditScenarioVars />
|
||||||
<EditEvaluations />
|
<EditEvaluations />
|
||||||
</Stack>
|
</VStack>
|
||||||
|
<DeleteButton />
|
||||||
|
</VStack>
|
||||||
</DrawerBody>
|
</DrawerBody>
|
||||||
</DrawerContent>
|
</DrawerContent>
|
||||||
</Drawer>
|
</Drawer>
|
||||||
57
src/components/OutputsTable/AddVariantButton.tsx
Normal file
57
src/components/OutputsTable/AddVariantButton.tsx
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
import { Box, Flex, Icon, Spinner } from "@chakra-ui/react";
|
||||||
|
import { BsPlus } from "react-icons/bs";
|
||||||
|
import { Text } from "@chakra-ui/react";
|
||||||
|
import { api } from "~/utils/api";
|
||||||
|
import {
|
||||||
|
useExperiment,
|
||||||
|
useExperimentAccess,
|
||||||
|
useHandledAsyncCallback,
|
||||||
|
useVisibleScenarioIds,
|
||||||
|
} from "~/utils/hooks";
|
||||||
|
import { cellPadding } from "../constants";
|
||||||
|
import { ActionButton } from "./ScenariosHeader";
|
||||||
|
|
||||||
|
export default function AddVariantButton() {
|
||||||
|
const experiment = useExperiment();
|
||||||
|
const mutation = api.promptVariants.create.useMutation();
|
||||||
|
const utils = api.useContext();
|
||||||
|
const visibleScenarios = useVisibleScenarioIds();
|
||||||
|
|
||||||
|
const [onClick, loading] = useHandledAsyncCallback(async () => {
|
||||||
|
if (!experiment.data) return;
|
||||||
|
await mutation.mutateAsync({
|
||||||
|
experimentId: experiment.data.id,
|
||||||
|
streamScenarios: visibleScenarios,
|
||||||
|
});
|
||||||
|
await utils.promptVariants.list.invalidate();
|
||||||
|
}, [mutation]);
|
||||||
|
|
||||||
|
const { canModify } = useExperimentAccess();
|
||||||
|
if (!canModify) return <Box w={cellPadding.x} />;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Flex w="100%" justifyContent="flex-end">
|
||||||
|
<ActionButton
|
||||||
|
onClick={onClick}
|
||||||
|
py={5}
|
||||||
|
leftIcon={<Icon as={loading ? Spinner : BsPlus} boxSize={6} mr={loading ? 1 : 0} />}
|
||||||
|
>
|
||||||
|
<Text display={{ base: "none", md: "flex" }}>Add Variant</Text>
|
||||||
|
</ActionButton>
|
||||||
|
{/* <Button
|
||||||
|
alignItems="center"
|
||||||
|
justifyContent="center"
|
||||||
|
fontWeight="normal"
|
||||||
|
bgColor="transparent"
|
||||||
|
_hover={{ bgColor: "gray.100" }}
|
||||||
|
px={cellPadding.x}
|
||||||
|
onClick={onClick}
|
||||||
|
height="unset"
|
||||||
|
minH={headerMinHeight}
|
||||||
|
>
|
||||||
|
<Icon as={loading ? Spinner : BsPlus} boxSize={6} mr={loading ? 1 : 0} />
|
||||||
|
<Text display={{ base: "none", md: "flex" }}>Add Variant</Text>
|
||||||
|
</Button> */}
|
||||||
|
</Flex>
|
||||||
|
);
|
||||||
|
}
|
||||||
46
src/components/OutputsTable/FloatingLabelInput.tsx
Normal file
46
src/components/OutputsTable/FloatingLabelInput.tsx
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
import { FormLabel, FormControl, type TextareaProps } from "@chakra-ui/react";
|
||||||
|
import { useState } from "react";
|
||||||
|
import AutoResizeTextArea from "../AutoResizeTextArea";
|
||||||
|
|
||||||
|
export const FloatingLabelInput = ({
|
||||||
|
label,
|
||||||
|
value,
|
||||||
|
...props
|
||||||
|
}: { label: string; value: string } & TextareaProps) => {
|
||||||
|
const [isFocused, setIsFocused] = useState(false);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<FormControl position="relative">
|
||||||
|
<FormLabel
|
||||||
|
position="absolute"
|
||||||
|
left="10px"
|
||||||
|
top={isFocused || !!value ? 0 : 3}
|
||||||
|
transform={isFocused || !!value ? "translateY(-50%)" : "translateY(0)"}
|
||||||
|
fontSize={isFocused || !!value ? "12px" : "16px"}
|
||||||
|
transition="all 0.15s"
|
||||||
|
zIndex="5"
|
||||||
|
bg="white"
|
||||||
|
px={1}
|
||||||
|
lineHeight="1"
|
||||||
|
pointerEvents="none"
|
||||||
|
color={isFocused ? "blue.500" : "gray.500"}
|
||||||
|
>
|
||||||
|
{label}
|
||||||
|
</FormLabel>
|
||||||
|
<AutoResizeTextArea
|
||||||
|
px={3}
|
||||||
|
pt={3}
|
||||||
|
pb={2}
|
||||||
|
onFocus={() => setIsFocused(true)}
|
||||||
|
onBlur={() => setIsFocused(false)}
|
||||||
|
borderRadius="md"
|
||||||
|
borderColor={isFocused ? "blue.500" : "gray.400"}
|
||||||
|
autoComplete="off"
|
||||||
|
value={value}
|
||||||
|
overflowY="auto"
|
||||||
|
overflowX="hidden"
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
</FormControl>
|
||||||
|
);
|
||||||
|
};
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
import { Button, type ButtonProps, HStack, Spinner, Icon } from "@chakra-ui/react";
|
|
||||||
import { BsPlus } from "react-icons/bs";
|
|
||||||
import { api } from "~/utils/api";
|
|
||||||
import { useExperiment, useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
|
|
||||||
|
|
||||||
// Extracted Button styling into reusable component
|
|
||||||
const StyledButton = ({ children, onClick }: ButtonProps) => (
|
|
||||||
<Button
|
|
||||||
fontWeight="normal"
|
|
||||||
bgColor="transparent"
|
|
||||||
_hover={{ bgColor: "gray.100" }}
|
|
||||||
px={2}
|
|
||||||
onClick={onClick}
|
|
||||||
>
|
|
||||||
{children}
|
|
||||||
</Button>
|
|
||||||
);
|
|
||||||
|
|
||||||
export default function NewScenarioButton() {
|
|
||||||
const { canModify } = useExperimentAccess();
|
|
||||||
|
|
||||||
const experiment = useExperiment();
|
|
||||||
const mutation = api.scenarios.create.useMutation();
|
|
||||||
const utils = api.useContext();
|
|
||||||
|
|
||||||
const [onClick] = useHandledAsyncCallback(async () => {
|
|
||||||
if (!experiment.data) return;
|
|
||||||
await mutation.mutateAsync({
|
|
||||||
experimentId: experiment.data.id,
|
|
||||||
});
|
|
||||||
await utils.scenarios.list.invalidate();
|
|
||||||
}, [mutation]);
|
|
||||||
|
|
||||||
const [onAutogenerate, autogenerating] = useHandledAsyncCallback(async () => {
|
|
||||||
if (!experiment.data) return;
|
|
||||||
await mutation.mutateAsync({
|
|
||||||
experimentId: experiment.data.id,
|
|
||||||
autogenerate: true,
|
|
||||||
});
|
|
||||||
await utils.scenarios.list.invalidate();
|
|
||||||
}, [mutation]);
|
|
||||||
|
|
||||||
if (!canModify) return null;
|
|
||||||
|
|
||||||
return (
|
|
||||||
<HStack spacing={2}>
|
|
||||||
<StyledButton onClick={onClick}>
|
|
||||||
<Icon as={BsPlus} boxSize={6} />
|
|
||||||
Add Scenario
|
|
||||||
</StyledButton>
|
|
||||||
<StyledButton onClick={onAutogenerate}>
|
|
||||||
<Icon as={autogenerating ? Spinner : BsPlus} boxSize={6} mr={autogenerating ? 1 : 0} />
|
|
||||||
Autogenerate Scenario
|
|
||||||
</StyledButton>
|
|
||||||
</HStack>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
import { Box, Button, Icon, Spinner, Text } from "@chakra-ui/react";
|
|
||||||
import { BsPlus } from "react-icons/bs";
|
|
||||||
import { api } from "~/utils/api";
|
|
||||||
import { useExperiment, useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
|
|
||||||
import { cellPadding, headerMinHeight } from "../constants";
|
|
||||||
|
|
||||||
export default function NewVariantButton() {
|
|
||||||
const experiment = useExperiment();
|
|
||||||
const mutation = api.promptVariants.create.useMutation();
|
|
||||||
const utils = api.useContext();
|
|
||||||
|
|
||||||
const [onClick, loading] = useHandledAsyncCallback(async () => {
|
|
||||||
if (!experiment.data) return;
|
|
||||||
await mutation.mutateAsync({
|
|
||||||
experimentId: experiment.data.id,
|
|
||||||
});
|
|
||||||
await utils.promptVariants.list.invalidate();
|
|
||||||
}, [mutation]);
|
|
||||||
|
|
||||||
const { canModify } = useExperimentAccess();
|
|
||||||
if (!canModify) return <Box w={cellPadding.x} />;
|
|
||||||
|
|
||||||
return (
|
|
||||||
<Button
|
|
||||||
w="100%"
|
|
||||||
alignItems="center"
|
|
||||||
justifyContent="center"
|
|
||||||
fontWeight="normal"
|
|
||||||
bgColor="transparent"
|
|
||||||
_hover={{ bgColor: "gray.100" }}
|
|
||||||
px={cellPadding.x}
|
|
||||||
onClick={onClick}
|
|
||||||
height="unset"
|
|
||||||
minH={headerMinHeight}
|
|
||||||
>
|
|
||||||
<Icon as={loading ? Spinner : BsPlus} boxSize={6} mr={loading ? 1 : 0} />
|
|
||||||
<Text display={{ base: "none", md: "flex" }}>Add Variant</Text>
|
|
||||||
</Button>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
19
src/components/OutputsTable/OutputCell/CellContent.tsx
Normal file
19
src/components/OutputsTable/OutputCell/CellContent.tsx
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import { type StackProps, VStack } from "@chakra-ui/react";
|
||||||
|
import { CellOptions } from "./CellOptions";
|
||||||
|
|
||||||
|
export const CellContent = ({
|
||||||
|
hardRefetch,
|
||||||
|
hardRefetching,
|
||||||
|
children,
|
||||||
|
...props
|
||||||
|
}: {
|
||||||
|
hardRefetch: () => void;
|
||||||
|
hardRefetching: boolean;
|
||||||
|
} & StackProps) => (
|
||||||
|
<VStack w="full" alignItems="flex-start" {...props}>
|
||||||
|
<CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} />
|
||||||
|
<VStack w="full" alignItems="flex-start" maxH={500} overflowY="auto">
|
||||||
|
{children}
|
||||||
|
</VStack>
|
||||||
|
</VStack>
|
||||||
|
);
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import { Button, HStack, Icon, Tooltip } from "@chakra-ui/react";
|
import { Button, HStack, Icon, Spinner, Tooltip } from "@chakra-ui/react";
|
||||||
import { BsArrowClockwise } from "react-icons/bs";
|
import { BsArrowClockwise } from "react-icons/bs";
|
||||||
import { useExperimentAccess } from "~/utils/hooks";
|
import { useExperimentAccess } from "~/utils/hooks";
|
||||||
|
|
||||||
@@ -12,7 +12,7 @@ export const CellOptions = ({
|
|||||||
const { canModify } = useExperimentAccess();
|
const { canModify } = useExperimentAccess();
|
||||||
return (
|
return (
|
||||||
<HStack justifyContent="flex-end" w="full">
|
<HStack justifyContent="flex-end" w="full">
|
||||||
{!refetchingOutput && canModify && (
|
{canModify && (
|
||||||
<Tooltip label="Refetch output" aria-label="refetch output">
|
<Tooltip label="Refetch output" aria-label="refetch output">
|
||||||
<Button
|
<Button
|
||||||
size="xs"
|
size="xs"
|
||||||
@@ -28,7 +28,7 @@ export const CellOptions = ({
|
|||||||
onClick={refetchOutput}
|
onClick={refetchOutput}
|
||||||
aria-label="refetch output"
|
aria-label="refetch output"
|
||||||
>
|
>
|
||||||
<Icon as={BsArrowClockwise} boxSize={4} />
|
<Icon as={refetchingOutput ? Spinner : BsArrowClockwise} boxSize={4} />
|
||||||
</Button>
|
</Button>
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
)}
|
)}
|
||||||
|
|||||||
@@ -1,16 +1,19 @@
|
|||||||
import { api } from "~/utils/api";
|
import { api } from "~/utils/api";
|
||||||
import { type PromptVariant, type Scenario } from "../types";
|
import { type PromptVariant, type Scenario } from "../types";
|
||||||
import { Spinner, Text, Center, VStack } from "@chakra-ui/react";
|
import { Text, VStack } from "@chakra-ui/react";
|
||||||
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
|
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
|
||||||
import SyntaxHighlighter from "react-syntax-highlighter";
|
import SyntaxHighlighter from "react-syntax-highlighter";
|
||||||
import { docco } from "react-syntax-highlighter/dist/cjs/styles/hljs";
|
import { docco } from "react-syntax-highlighter/dist/cjs/styles/hljs";
|
||||||
import stringify from "json-stringify-pretty-compact";
|
import stringify from "json-stringify-pretty-compact";
|
||||||
import { type ReactElement, useState, useEffect } from "react";
|
import { type ReactElement, useState, useEffect, Fragment } from "react";
|
||||||
import { type ChatCompletion } from "openai/resources/chat";
|
|
||||||
import useSocket from "~/utils/useSocket";
|
import useSocket from "~/utils/useSocket";
|
||||||
import { OutputStats } from "./OutputStats";
|
import { OutputStats } from "./OutputStats";
|
||||||
import { ErrorHandler } from "./ErrorHandler";
|
import { RetryCountdown } from "./RetryCountdown";
|
||||||
import { CellOptions } from "./CellOptions";
|
import frontendModelProviders from "~/modelProviders/frontendModelProviders";
|
||||||
|
import { ResponseLog } from "./ResponseLog";
|
||||||
|
import { CellContent } from "./CellContent";
|
||||||
|
|
||||||
|
const WAITING_MESSAGE_INTERVAL = 20000;
|
||||||
|
|
||||||
export default function OutputCell({
|
export default function OutputCell({
|
||||||
scenario,
|
scenario,
|
||||||
@@ -33,18 +36,19 @@ export default function OutputCell({
|
|||||||
|
|
||||||
if (!templateHasVariables) disabledReason = "Add a value to the scenario variables to see output";
|
if (!templateHasVariables) disabledReason = "Add a value to the scenario variables to see output";
|
||||||
|
|
||||||
// if (variant.config === null || Object.keys(variant.config).length === 0)
|
|
||||||
// disabledReason = "Save your prompt variant to see output";
|
|
||||||
|
|
||||||
const [refetchInterval, setRefetchInterval] = useState(0);
|
const [refetchInterval, setRefetchInterval] = useState(0);
|
||||||
const { data: cell, isLoading: queryLoading } = api.scenarioVariantCells.get.useQuery(
|
const { data: cell, isLoading: queryLoading } = api.scenarioVariantCells.get.useQuery(
|
||||||
{ scenarioId: scenario.id, variantId: variant.id },
|
{ scenarioId: scenario.id, variantId: variant.id },
|
||||||
{ refetchInterval },
|
{ refetchInterval },
|
||||||
);
|
);
|
||||||
|
|
||||||
const { mutateAsync: hardRefetchMutate, isLoading: refetchingOutput } =
|
const provider =
|
||||||
api.scenarioVariantCells.forceRefetch.useMutation();
|
frontendModelProviders[variant.modelProvider as keyof typeof frontendModelProviders];
|
||||||
const [hardRefetch] = useHandledAsyncCallback(async () => {
|
|
||||||
|
type OutputSchema = Parameters<typeof provider.normalizeOutput>[0];
|
||||||
|
|
||||||
|
const { mutateAsync: hardRefetchMutate } = api.scenarioVariantCells.forceRefetch.useMutation();
|
||||||
|
const [hardRefetch, hardRefetching] = useHandledAsyncCallback(async () => {
|
||||||
await hardRefetchMutate({ scenarioId: scenario.id, variantId: variant.id });
|
await hardRefetchMutate({ scenarioId: scenario.id, variantId: variant.id });
|
||||||
await utils.scenarioVariantCells.get.invalidate({
|
await utils.scenarioVariantCells.get.invalidate({
|
||||||
scenarioId: scenario.id,
|
scenarioId: scenario.id,
|
||||||
@@ -55,51 +59,101 @@ export default function OutputCell({
|
|||||||
});
|
});
|
||||||
}, [hardRefetchMutate, scenario.id, variant.id]);
|
}, [hardRefetchMutate, scenario.id, variant.id]);
|
||||||
|
|
||||||
const fetchingOutput = queryLoading || refetchingOutput;
|
const fetchingOutput = queryLoading || hardRefetching;
|
||||||
|
|
||||||
const awaitingOutput =
|
const awaitingOutput =
|
||||||
!cell ||
|
!cell ||
|
||||||
|
!cell.evalsComplete ||
|
||||||
cell.retrievalStatus === "PENDING" ||
|
cell.retrievalStatus === "PENDING" ||
|
||||||
cell.retrievalStatus === "IN_PROGRESS" ||
|
cell.retrievalStatus === "IN_PROGRESS" ||
|
||||||
refetchingOutput;
|
hardRefetching;
|
||||||
useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : 0), [awaitingOutput]);
|
useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : 0), [awaitingOutput]);
|
||||||
|
|
||||||
const modelOutput = cell?.modelOutput;
|
// TODO: disconnect from socket if we're not streaming anymore
|
||||||
|
const streamedMessage = useSocket<OutputSchema>(cell?.id);
|
||||||
// Disconnect from socket if we're not streaming anymore
|
|
||||||
const streamedMessage = useSocket(cell?.streamingChannel);
|
|
||||||
const streamedContent = streamedMessage?.choices?.[0]?.message?.content;
|
|
||||||
|
|
||||||
if (!vars) return null;
|
if (!vars) return null;
|
||||||
|
|
||||||
if (disabledReason) return <Text color="gray.500">{disabledReason}</Text>;
|
if (!cell && !fetchingOutput)
|
||||||
|
|
||||||
if (awaitingOutput && !streamedMessage)
|
|
||||||
return (
|
return (
|
||||||
<Center h="100%" w="100%">
|
<CellContent hardRefetching={hardRefetching} hardRefetch={hardRefetch}>
|
||||||
<Spinner />
|
<Text color="gray.500">Error retrieving output</Text>
|
||||||
</Center>
|
</CellContent>
|
||||||
);
|
);
|
||||||
|
|
||||||
if (!cell && !fetchingOutput) return <Text color="gray.500">Error retrieving output</Text>;
|
|
||||||
|
|
||||||
if (cell && cell.errorMessage) {
|
if (cell && cell.errorMessage) {
|
||||||
return <ErrorHandler cell={cell} refetchOutput={hardRefetch} />;
|
return (
|
||||||
|
<CellContent hardRefetching={hardRefetching} hardRefetch={hardRefetch}>
|
||||||
|
<Text color="red.500">{cell.errorMessage}</Text>
|
||||||
|
</CellContent>
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
const response = modelOutput?.output as unknown as ChatCompletion;
|
if (disabledReason) return <Text color="gray.500">{disabledReason}</Text>;
|
||||||
const message = response?.choices?.[0]?.message;
|
|
||||||
|
|
||||||
if (modelOutput && message?.function_call) {
|
const mostRecentResponse = cell?.modelResponses[cell.modelResponses.length - 1];
|
||||||
const rawArgs = message.function_call.arguments ?? "null";
|
const showLogs = !streamedMessage && !mostRecentResponse?.output;
|
||||||
let parsedArgs: string;
|
|
||||||
try {
|
if (showLogs)
|
||||||
parsedArgs = JSON.parse(rawArgs);
|
return (
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
<CellContent
|
||||||
} catch (e: any) {
|
hardRefetching={hardRefetching}
|
||||||
parsedArgs = `Failed to parse arguments as JSON: '${rawArgs}' ERROR: ${e.message as string}`;
|
hardRefetch={hardRefetch}
|
||||||
|
alignItems="flex-start"
|
||||||
|
fontFamily="inconsolata, monospace"
|
||||||
|
spacing={0}
|
||||||
|
>
|
||||||
|
{cell?.jobQueuedAt && <ResponseLog time={cell.jobQueuedAt} title="Job queued" />}
|
||||||
|
{cell?.jobStartedAt && <ResponseLog time={cell.jobStartedAt} title="Job started" />}
|
||||||
|
{cell?.modelResponses?.map((response) => {
|
||||||
|
let numWaitingMessages = 0;
|
||||||
|
const relativeWaitingTime = response.receivedAt
|
||||||
|
? response.receivedAt.getTime()
|
||||||
|
: Date.now();
|
||||||
|
if (response.requestedAt) {
|
||||||
|
numWaitingMessages = Math.floor(
|
||||||
|
(relativeWaitingTime - response.requestedAt.getTime()) / WAITING_MESSAGE_INTERVAL,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
return (
|
||||||
|
<Fragment key={response.id}>
|
||||||
|
{response.requestedAt && (
|
||||||
|
<ResponseLog time={response.requestedAt} title="Request sent to API" />
|
||||||
|
)}
|
||||||
|
{response.requestedAt &&
|
||||||
|
Array.from({ length: numWaitingMessages }, (_, i) => (
|
||||||
|
<ResponseLog
|
||||||
|
key={`waiting-${i}`}
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||||
|
time={new Date(response.requestedAt!.getTime() + i * WAITING_MESSAGE_INTERVAL)}
|
||||||
|
title="Waiting for response"
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
{response.receivedAt && (
|
||||||
|
<ResponseLog
|
||||||
|
time={response.receivedAt}
|
||||||
|
title="Response received from API"
|
||||||
|
message={`statusCode: ${response.statusCode ?? ""}\n ${
|
||||||
|
response.errorMessage ?? ""
|
||||||
|
}`}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</Fragment>
|
||||||
|
);
|
||||||
|
}) ?? null}
|
||||||
|
{mostRecentResponse?.retryTime && (
|
||||||
|
<RetryCountdown retryTime={mostRecentResponse.retryTime} />
|
||||||
|
)}
|
||||||
|
</CellContent>
|
||||||
|
);
|
||||||
|
|
||||||
|
const normalizedOutput = mostRecentResponse?.output
|
||||||
|
? provider.normalizeOutput(mostRecentResponse?.output)
|
||||||
|
: streamedMessage
|
||||||
|
? provider.normalizeOutput(streamedMessage)
|
||||||
|
: null;
|
||||||
|
|
||||||
|
if (mostRecentResponse?.output && normalizedOutput?.type === "json") {
|
||||||
return (
|
return (
|
||||||
<VStack
|
<VStack
|
||||||
w="100%"
|
w="100%"
|
||||||
@@ -109,8 +163,13 @@ export default function OutputCell({
|
|||||||
overflowX="hidden"
|
overflowX="hidden"
|
||||||
justifyContent="space-between"
|
justifyContent="space-between"
|
||||||
>
|
>
|
||||||
<VStack w="full" flex={1} spacing={0}>
|
<CellContent
|
||||||
<CellOptions refetchingOutput={refetchingOutput} refetchOutput={hardRefetch} />
|
hardRefetching={hardRefetching}
|
||||||
|
hardRefetch={hardRefetch}
|
||||||
|
w="full"
|
||||||
|
flex={1}
|
||||||
|
spacing={0}
|
||||||
|
>
|
||||||
<SyntaxHighlighter
|
<SyntaxHighlighter
|
||||||
customStyle={{ overflowX: "unset", width: "100%", flex: 1 }}
|
customStyle={{ overflowX: "unset", width: "100%", flex: 1 }}
|
||||||
language="json"
|
language="json"
|
||||||
@@ -120,30 +179,26 @@ export default function OutputCell({
|
|||||||
}}
|
}}
|
||||||
wrapLines
|
wrapLines
|
||||||
>
|
>
|
||||||
{stringify(
|
{stringify(normalizedOutput.value, { maxLength: 40 })}
|
||||||
{
|
|
||||||
function: message.function_call.name,
|
|
||||||
args: parsedArgs,
|
|
||||||
},
|
|
||||||
{ maxLength: 40 },
|
|
||||||
)}
|
|
||||||
</SyntaxHighlighter>
|
</SyntaxHighlighter>
|
||||||
</VStack>
|
</CellContent>
|
||||||
<OutputStats modelOutput={modelOutput} scenario={scenario} />
|
<OutputStats modelResponse={mostRecentResponse} scenario={scenario} />
|
||||||
</VStack>
|
</VStack>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
const contentToDisplay =
|
const contentToDisplay = (normalizedOutput?.type === "text" && normalizedOutput.value) || "";
|
||||||
message?.content ?? streamedContent ?? JSON.stringify(modelOutput?.output);
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<VStack w="100%" h="100%" justifyContent="space-between" whiteSpace="pre-wrap">
|
<VStack w="100%" h="100%" justifyContent="space-between" whiteSpace="pre-wrap">
|
||||||
<VStack w="full" alignItems="flex-start" spacing={0}>
|
<VStack w="full" alignItems="flex-start" spacing={0}>
|
||||||
<CellOptions refetchingOutput={refetchingOutput} refetchOutput={hardRefetch} />
|
<CellContent hardRefetching={hardRefetching} hardRefetch={hardRefetch}>
|
||||||
<Text>{contentToDisplay}</Text>
|
<Text>{contentToDisplay}</Text>
|
||||||
|
</CellContent>
|
||||||
</VStack>
|
</VStack>
|
||||||
{modelOutput && <OutputStats modelOutput={modelOutput} scenario={scenario} />}
|
{mostRecentResponse?.output && (
|
||||||
|
<OutputStats modelResponse={mostRecentResponse} scenario={scenario} />
|
||||||
|
)}
|
||||||
</VStack>
|
</VStack>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,28 +7,32 @@ import { CostTooltip } from "~/components/tooltip/CostTooltip";
|
|||||||
const SHOW_TIME = true;
|
const SHOW_TIME = true;
|
||||||
|
|
||||||
export const OutputStats = ({
|
export const OutputStats = ({
|
||||||
modelOutput,
|
modelResponse,
|
||||||
}: {
|
}: {
|
||||||
modelOutput: NonNullable<
|
modelResponse: NonNullable<
|
||||||
NonNullable<RouterOutputs["scenarioVariantCells"]["get"]>["modelOutput"]
|
NonNullable<RouterOutputs["scenarioVariantCells"]["get"]>["modelResponses"][0]
|
||||||
>;
|
>;
|
||||||
scenario: Scenario;
|
scenario: Scenario;
|
||||||
}) => {
|
}) => {
|
||||||
const timeToComplete = modelOutput.timeToComplete;
|
const timeToComplete =
|
||||||
|
modelResponse.receivedAt && modelResponse.requestedAt
|
||||||
|
? modelResponse.receivedAt.getTime() - modelResponse.requestedAt.getTime()
|
||||||
|
: 0;
|
||||||
|
|
||||||
const promptTokens = modelOutput.promptTokens;
|
const promptTokens = modelResponse.promptTokens;
|
||||||
const completionTokens = modelOutput.completionTokens;
|
const completionTokens = modelResponse.completionTokens;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<HStack w="full" align="center" color="gray.500" fontSize="2xs" mt={{ base: 0, md: 1 }}>
|
<HStack w="full" align="center" color="gray.500" fontSize="2xs" mt={{ base: 0, md: 1 }}>
|
||||||
<HStack flex={1}>
|
<HStack flex={1}>
|
||||||
{modelOutput.outputEvaluation.map((evaluation) => {
|
{modelResponse.outputEvaluations.map((evaluation) => {
|
||||||
const passed = evaluation.result > 0.5;
|
const passed = evaluation.result > 0.5;
|
||||||
return (
|
return (
|
||||||
<Tooltip
|
<Tooltip
|
||||||
isDisabled={!evaluation.details}
|
isDisabled={!evaluation.details}
|
||||||
label={evaluation.details}
|
label={evaluation.details}
|
||||||
key={evaluation.id}
|
key={evaluation.id}
|
||||||
|
shouldWrapChildren
|
||||||
>
|
>
|
||||||
<HStack spacing={0}>
|
<HStack spacing={0}>
|
||||||
<Text>{evaluation.evaluation.label}</Text>
|
<Text>{evaluation.evaluation.label}</Text>
|
||||||
@@ -42,15 +46,15 @@ export const OutputStats = ({
|
|||||||
);
|
);
|
||||||
})}
|
})}
|
||||||
</HStack>
|
</HStack>
|
||||||
{modelOutput.cost && (
|
{modelResponse.cost && (
|
||||||
<CostTooltip
|
<CostTooltip
|
||||||
promptTokens={promptTokens}
|
promptTokens={promptTokens}
|
||||||
completionTokens={completionTokens}
|
completionTokens={completionTokens}
|
||||||
cost={modelOutput.cost}
|
cost={modelResponse.cost}
|
||||||
>
|
>
|
||||||
<HStack spacing={0}>
|
<HStack spacing={0}>
|
||||||
<Icon as={BsCurrencyDollar} />
|
<Icon as={BsCurrencyDollar} />
|
||||||
<Text mr={1}>{modelOutput.cost.toFixed(3)}</Text>
|
<Text mr={1}>{modelResponse.cost.toFixed(3)}</Text>
|
||||||
</HStack>
|
</HStack>
|
||||||
</CostTooltip>
|
</CostTooltip>
|
||||||
)}
|
)}
|
||||||
|
|||||||
22
src/components/OutputsTable/OutputCell/ResponseLog.tsx
Normal file
22
src/components/OutputsTable/OutputCell/ResponseLog.tsx
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import { HStack, VStack, Text } from "@chakra-ui/react";
|
||||||
|
import dayjs from "dayjs";
|
||||||
|
|
||||||
|
export const ResponseLog = ({
|
||||||
|
time,
|
||||||
|
title,
|
||||||
|
message,
|
||||||
|
}: {
|
||||||
|
time: Date;
|
||||||
|
title: string;
|
||||||
|
message?: string;
|
||||||
|
}) => {
|
||||||
|
return (
|
||||||
|
<VStack spacing={0} alignItems="flex-start">
|
||||||
|
<HStack>
|
||||||
|
<Text>{dayjs(time).format("HH:mm:ss")}</Text>
|
||||||
|
<Text>{title}</Text>
|
||||||
|
</HStack>
|
||||||
|
{message && <Text pl={4}>{message}</Text>}
|
||||||
|
</VStack>
|
||||||
|
);
|
||||||
|
};
|
||||||
@@ -1,21 +1,12 @@
|
|||||||
import { type ScenarioVariantCell } from "@prisma/client";
|
import { Text } from "@chakra-ui/react";
|
||||||
import { VStack, Text } from "@chakra-ui/react";
|
|
||||||
import { useEffect, useState } from "react";
|
import { useEffect, useState } from "react";
|
||||||
import pluralize from "pluralize";
|
import pluralize from "pluralize";
|
||||||
|
|
||||||
export const ErrorHandler = ({
|
export const RetryCountdown = ({ retryTime }: { retryTime: Date }) => {
|
||||||
cell,
|
|
||||||
refetchOutput,
|
|
||||||
}: {
|
|
||||||
cell: ScenarioVariantCell;
|
|
||||||
refetchOutput: () => void;
|
|
||||||
}) => {
|
|
||||||
const [msToWait, setMsToWait] = useState(0);
|
const [msToWait, setMsToWait] = useState(0);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (!cell.retryTime) return;
|
const initialWaitTime = retryTime.getTime() - Date.now();
|
||||||
|
|
||||||
const initialWaitTime = cell.retryTime.getTime() - Date.now();
|
|
||||||
const msModuloOneSecond = initialWaitTime % 1000;
|
const msModuloOneSecond = initialWaitTime % 1000;
|
||||||
let remainingTime = initialWaitTime - msModuloOneSecond;
|
let remainingTime = initialWaitTime - msModuloOneSecond;
|
||||||
setMsToWait(remainingTime);
|
setMsToWait(remainingTime);
|
||||||
@@ -36,18 +27,13 @@ export const ErrorHandler = ({
|
|||||||
clearInterval(interval);
|
clearInterval(interval);
|
||||||
clearTimeout(timeout);
|
clearTimeout(timeout);
|
||||||
};
|
};
|
||||||
}, [cell.retryTime, cell.statusCode, setMsToWait, refetchOutput]);
|
}, [retryTime]);
|
||||||
|
|
||||||
|
if (msToWait <= 0) return null;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<VStack w="full">
|
|
||||||
<Text color="red.600" wordBreak="break-word">
|
|
||||||
{cell.errorMessage}
|
|
||||||
</Text>
|
|
||||||
{msToWait > 0 && (
|
|
||||||
<Text color="red.600" fontSize="sm">
|
<Text color="red.600" fontSize="sm">
|
||||||
Retrying in {pluralize("second", Math.ceil(msToWait / 1000), true)}...
|
Retrying in {pluralize("second", Math.ceil(msToWait / 1000), true)}...
|
||||||
</Text>
|
</Text>
|
||||||
)}
|
|
||||||
</VStack>
|
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
@@ -1,15 +1,27 @@
|
|||||||
import { type DragEvent } from "react";
|
import { useEffect, type DragEvent } from "react";
|
||||||
import { api } from "~/utils/api";
|
import { api } from "~/utils/api";
|
||||||
import { isEqual } from "lodash-es";
|
import { isEqual } from "lodash-es";
|
||||||
import { type Scenario } from "./types";
|
import { type Scenario } from "./types";
|
||||||
import { useExperiment, useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
|
import { useExperiment, useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
|
||||||
import { useState } from "react";
|
import { useState } from "react";
|
||||||
|
|
||||||
import { Box, Button, Flex, HStack, Icon, Spinner, Stack, Tooltip, VStack } from "@chakra-ui/react";
|
import {
|
||||||
|
Box,
|
||||||
|
Button,
|
||||||
|
HStack,
|
||||||
|
Icon,
|
||||||
|
IconButton,
|
||||||
|
Spinner,
|
||||||
|
Stack,
|
||||||
|
Tooltip,
|
||||||
|
VStack,
|
||||||
|
Text,
|
||||||
|
} from "@chakra-ui/react";
|
||||||
import { cellPadding } from "../constants";
|
import { cellPadding } from "../constants";
|
||||||
import { BsX } from "react-icons/bs";
|
import { BsArrowsAngleExpand, BsX } from "react-icons/bs";
|
||||||
import { RiDraggable } from "react-icons/ri";
|
import { RiDraggable } from "react-icons/ri";
|
||||||
import AutoResizeTextArea from "../AutoResizeTextArea";
|
import { FloatingLabelInput } from "./FloatingLabelInput";
|
||||||
|
import { ScenarioEditorModal } from "./ScenarioEditorModal";
|
||||||
|
|
||||||
export default function ScenarioEditor({
|
export default function ScenarioEditor({
|
||||||
scenario,
|
scenario,
|
||||||
@@ -28,6 +40,10 @@ export default function ScenarioEditor({
|
|||||||
|
|
||||||
const [values, setValues] = useState<Record<string, string>>(savedValues);
|
const [values, setValues] = useState<Record<string, string>>(savedValues);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (savedValues) setValues(savedValues);
|
||||||
|
}, [savedValues]);
|
||||||
|
|
||||||
const experiment = useExperiment();
|
const experiment = useExperiment();
|
||||||
const vars = api.templateVars.list.useQuery({ experimentId: experiment.data?.id ?? "" });
|
const vars = api.templateVars.list.useQuery({ experimentId: experiment.data?.id ?? "" });
|
||||||
|
|
||||||
@@ -71,12 +87,15 @@ export default function ScenarioEditor({
|
|||||||
[reorderMutation, scenario.id],
|
[reorderMutation, scenario.id],
|
||||||
);
|
);
|
||||||
|
|
||||||
|
const [scenarioEditorModalOpen, setScenarioEditorModalOpen] = useState(false);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
|
<>
|
||||||
<HStack
|
<HStack
|
||||||
alignItems="flex-start"
|
alignItems="flex-start"
|
||||||
pr={cellPadding.x}
|
px={cellPadding.x}
|
||||||
py={cellPadding.y}
|
py={cellPadding.y}
|
||||||
pl={canModify ? 0 : cellPadding.x}
|
spacing={0}
|
||||||
height="100%"
|
height="100%"
|
||||||
draggable={!variableInputHovered}
|
draggable={!variableInputHovered}
|
||||||
onDragStart={(e) => {
|
onDragStart={(e) => {
|
||||||
@@ -96,10 +115,13 @@ export default function ScenarioEditor({
|
|||||||
onDrop={onReorder}
|
onDrop={onReorder}
|
||||||
backgroundColor={isDragTarget ? "gray.100" : "transparent"}
|
backgroundColor={isDragTarget ? "gray.100" : "transparent"}
|
||||||
>
|
>
|
||||||
{canModify && (
|
{canModify && props.canHide && (
|
||||||
<Stack alignSelf="flex-start" opacity={props.hovered ? 1 : 0} spacing={0}>
|
<Stack
|
||||||
{props.canHide && (
|
alignSelf="flex-start"
|
||||||
<>
|
opacity={props.hovered ? 1 : 0}
|
||||||
|
spacing={0}
|
||||||
|
ml={-cellPadding.x}
|
||||||
|
>
|
||||||
<Tooltip label="Hide scenario" hasArrow>
|
<Tooltip label="Hide scenario" hasArrow>
|
||||||
{/* for some reason the tooltip can't position itself properly relative to the icon without the wrapping box */}
|
{/* for some reason the tooltip can't position itself properly relative to the icon without the wrapping box */}
|
||||||
<Button
|
<Button
|
||||||
@@ -114,7 +136,7 @@ export default function ScenarioEditor({
|
|||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<Icon as={hidingInProgress ? Spinner : BsX} boxSize={6} />
|
<Icon as={hidingInProgress ? Spinner : BsX} boxSize={hidingInProgress ? 4 : 6} />
|
||||||
</Button>
|
</Button>
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
<Icon
|
<Icon
|
||||||
@@ -123,51 +145,44 @@ export default function ScenarioEditor({
|
|||||||
color="gray.400"
|
color="gray.400"
|
||||||
_hover={{ color: "gray.800", cursor: "pointer" }}
|
_hover={{ color: "gray.800", cursor: "pointer" }}
|
||||||
/>
|
/>
|
||||||
</>
|
|
||||||
)}
|
|
||||||
</Stack>
|
</Stack>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{variableLabels.length === 0 ? (
|
{variableLabels.length === 0 ? (
|
||||||
<Box color="gray.500">{vars.data ? "No scenario variables configured" : "Loading..."}</Box>
|
<Box color="gray.500">
|
||||||
|
{vars.data ? "No scenario variables configured" : "Loading..."}
|
||||||
|
</Box>
|
||||||
) : (
|
) : (
|
||||||
<VStack spacing={1}>
|
<VStack spacing={4} flex={1} py={2}>
|
||||||
|
<HStack justifyContent="space-between" w="100%">
|
||||||
|
<Text color="gray.500">Scenario</Text>
|
||||||
|
<IconButton
|
||||||
|
className="fullscreen-toggle"
|
||||||
|
aria-label="Maximize"
|
||||||
|
icon={<BsArrowsAngleExpand />}
|
||||||
|
onClick={() => setScenarioEditorModalOpen(true)}
|
||||||
|
boxSize={6}
|
||||||
|
borderRadius={4}
|
||||||
|
p={1.5}
|
||||||
|
minW={0}
|
||||||
|
colorScheme="gray"
|
||||||
|
color="gray.500"
|
||||||
|
variant="ghost"
|
||||||
|
/>
|
||||||
|
</HStack>
|
||||||
{variableLabels.map((key) => {
|
{variableLabels.map((key) => {
|
||||||
const value = values[key] ?? "";
|
const value = values[key] ?? "";
|
||||||
const layoutDirection = value.length > 20 ? "column" : "row";
|
|
||||||
return (
|
return (
|
||||||
<Flex
|
<FloatingLabelInput
|
||||||
key={key}
|
key={key}
|
||||||
direction={layoutDirection}
|
label={key}
|
||||||
alignItems={layoutDirection === "column" ? "flex-start" : "center"}
|
|
||||||
flexWrap="wrap"
|
|
||||||
width="full"
|
|
||||||
>
|
|
||||||
<Box
|
|
||||||
bgColor="blue.100"
|
|
||||||
color="blue.600"
|
|
||||||
px={1}
|
|
||||||
my="3px"
|
|
||||||
fontSize="xs"
|
|
||||||
fontWeight="bold"
|
|
||||||
>
|
|
||||||
{key}
|
|
||||||
</Box>
|
|
||||||
<AutoResizeTextArea
|
|
||||||
px={2}
|
|
||||||
py={1}
|
|
||||||
placeholder="empty"
|
|
||||||
borderRadius="sm"
|
|
||||||
fontSize="sm"
|
|
||||||
lineHeight={1.2}
|
|
||||||
value={value}
|
|
||||||
isDisabled={!canModify}
|
isDisabled={!canModify}
|
||||||
_disabled={{ opacity: 1, cursor: "default" }}
|
style={{ width: "100%" }}
|
||||||
|
maxHeight={32}
|
||||||
|
value={value}
|
||||||
onChange={(e) => {
|
onChange={(e) => {
|
||||||
setValues((prev) => ({ ...prev, [key]: e.target.value }));
|
setValues((prev) => ({ ...prev, [key]: e.target.value }));
|
||||||
}}
|
}}
|
||||||
maxH="32"
|
|
||||||
overflowY="auto"
|
|
||||||
onKeyDown={(e) => {
|
onKeyDown={(e) => {
|
||||||
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
|
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
|
||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
@@ -175,16 +190,9 @@ export default function ScenarioEditor({
|
|||||||
onSave();
|
onSave();
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
resize="none"
|
|
||||||
overflow="hidden"
|
|
||||||
flex={layoutDirection === "row" ? 1 : undefined}
|
|
||||||
borderColor={hasChanged ? "blue.300" : "transparent"}
|
|
||||||
_hover={{ borderColor: "gray.300" }}
|
|
||||||
_focus={{ borderColor: "blue.500", outline: "none", bg: "white" }}
|
|
||||||
onMouseEnter={() => setVariableInputHovered(true)}
|
onMouseEnter={() => setVariableInputHovered(true)}
|
||||||
onMouseLeave={() => setVariableInputHovered(false)}
|
onMouseLeave={() => setVariableInputHovered(false)}
|
||||||
/>
|
/>
|
||||||
</Flex>
|
|
||||||
);
|
);
|
||||||
})}
|
})}
|
||||||
{hasChanged && (
|
{hasChanged && (
|
||||||
@@ -206,5 +214,13 @@ export default function ScenarioEditor({
|
|||||||
</VStack>
|
</VStack>
|
||||||
)}
|
)}
|
||||||
</HStack>
|
</HStack>
|
||||||
|
{scenarioEditorModalOpen && (
|
||||||
|
<ScenarioEditorModal
|
||||||
|
scenarioId={scenario.id}
|
||||||
|
initialValues={savedValues}
|
||||||
|
onClose={() => setScenarioEditorModalOpen(false)}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
132
src/components/OutputsTable/ScenarioEditorModal.tsx
Normal file
132
src/components/OutputsTable/ScenarioEditorModal.tsx
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
import {
|
||||||
|
Button,
|
||||||
|
HStack,
|
||||||
|
Icon,
|
||||||
|
Modal,
|
||||||
|
ModalBody,
|
||||||
|
ModalCloseButton,
|
||||||
|
ModalContent,
|
||||||
|
ModalFooter,
|
||||||
|
ModalHeader,
|
||||||
|
ModalOverlay,
|
||||||
|
Spinner,
|
||||||
|
Text,
|
||||||
|
VStack,
|
||||||
|
} from "@chakra-ui/react";
|
||||||
|
import { useEffect, useState } from "react";
|
||||||
|
import { BsFileTextFill } from "react-icons/bs";
|
||||||
|
import { isEqual } from "lodash-es";
|
||||||
|
|
||||||
|
import { api } from "~/utils/api";
|
||||||
|
import {
|
||||||
|
useScenario,
|
||||||
|
useHandledAsyncCallback,
|
||||||
|
useExperiment,
|
||||||
|
useExperimentAccess,
|
||||||
|
} from "~/utils/hooks";
|
||||||
|
import { FloatingLabelInput } from "./FloatingLabelInput";
|
||||||
|
|
||||||
|
export const ScenarioEditorModal = ({
|
||||||
|
scenarioId,
|
||||||
|
initialValues,
|
||||||
|
onClose,
|
||||||
|
}: {
|
||||||
|
scenarioId: string;
|
||||||
|
initialValues: Record<string, string>;
|
||||||
|
onClose: () => void;
|
||||||
|
}) => {
|
||||||
|
const utils = api.useContext();
|
||||||
|
const experiment = useExperiment();
|
||||||
|
const { canModify } = useExperimentAccess();
|
||||||
|
const scenario = useScenario(scenarioId);
|
||||||
|
|
||||||
|
const savedValues = scenario.data?.variableValues as Record<string, string>;
|
||||||
|
|
||||||
|
const [values, setValues] = useState<Record<string, string>>(initialValues);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (savedValues) setValues(savedValues);
|
||||||
|
}, [savedValues]);
|
||||||
|
|
||||||
|
const hasChanged = !isEqual(savedValues, values);
|
||||||
|
|
||||||
|
const mutation = api.scenarios.replaceWithValues.useMutation();
|
||||||
|
|
||||||
|
const [onSave, saving] = useHandledAsyncCallback(async () => {
|
||||||
|
await mutation.mutateAsync({
|
||||||
|
id: scenarioId,
|
||||||
|
values,
|
||||||
|
});
|
||||||
|
await utils.scenarios.list.invalidate();
|
||||||
|
}, [mutation, values]);
|
||||||
|
|
||||||
|
console.log("scenario", scenario);
|
||||||
|
|
||||||
|
const vars = api.templateVars.list.useQuery({ experimentId: experiment.data?.id ?? "" });
|
||||||
|
const variableLabels = vars.data?.map((v) => v.label) ?? [];
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Modal
|
||||||
|
isOpen
|
||||||
|
onClose={onClose}
|
||||||
|
size={{ base: "xl", sm: "2xl", md: "3xl", lg: "5xl", xl: "7xl" }}
|
||||||
|
>
|
||||||
|
<ModalOverlay />
|
||||||
|
<ModalContent w={1200}>
|
||||||
|
<ModalHeader>
|
||||||
|
<HStack>
|
||||||
|
<Icon as={BsFileTextFill} />
|
||||||
|
<Text>Scenario</Text>
|
||||||
|
</HStack>
|
||||||
|
</ModalHeader>
|
||||||
|
<ModalCloseButton />
|
||||||
|
<ModalBody maxW="unset">
|
||||||
|
<VStack spacing={8}>
|
||||||
|
{values &&
|
||||||
|
variableLabels.map((key) => {
|
||||||
|
const value = values[key] ?? "";
|
||||||
|
return (
|
||||||
|
<FloatingLabelInput
|
||||||
|
key={key}
|
||||||
|
label={key}
|
||||||
|
isDisabled={!canModify}
|
||||||
|
_disabled={{ opacity: 1 }}
|
||||||
|
style={{ width: "100%" }}
|
||||||
|
value={value}
|
||||||
|
onChange={(e) => {
|
||||||
|
setValues((prev) => ({ ...prev, [key]: e.target.value }));
|
||||||
|
}}
|
||||||
|
onKeyDown={(e) => {
|
||||||
|
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
|
||||||
|
e.preventDefault();
|
||||||
|
e.currentTarget.blur();
|
||||||
|
onSave();
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</VStack>
|
||||||
|
</ModalBody>
|
||||||
|
|
||||||
|
<ModalFooter>
|
||||||
|
{canModify && (
|
||||||
|
<HStack>
|
||||||
|
<Button
|
||||||
|
colorScheme="gray"
|
||||||
|
onClick={() => setValues(savedValues)}
|
||||||
|
minW={24}
|
||||||
|
isDisabled={!hasChanged}
|
||||||
|
>
|
||||||
|
<Text>Reset</Text>
|
||||||
|
</Button>
|
||||||
|
<Button colorScheme="blue" onClick={onSave} minW={24} isDisabled={!hasChanged}>
|
||||||
|
{saving ? <Spinner boxSize={4} /> : <Text>Save</Text>}
|
||||||
|
</Button>
|
||||||
|
</HStack>
|
||||||
|
)}
|
||||||
|
</ModalFooter>
|
||||||
|
</ModalContent>
|
||||||
|
</Modal>
|
||||||
|
);
|
||||||
|
};
|
||||||
74
src/components/OutputsTable/ScenarioPaginator.tsx
Normal file
74
src/components/OutputsTable/ScenarioPaginator.tsx
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
import { Box, HStack, IconButton } from "@chakra-ui/react";
|
||||||
|
import {
|
||||||
|
BsChevronDoubleLeft,
|
||||||
|
BsChevronDoubleRight,
|
||||||
|
BsChevronLeft,
|
||||||
|
BsChevronRight,
|
||||||
|
} from "react-icons/bs";
|
||||||
|
import { usePage, useScenarios } from "~/utils/hooks";
|
||||||
|
|
||||||
|
const ScenarioPaginator = () => {
|
||||||
|
const [page, setPage] = usePage();
|
||||||
|
const { data } = useScenarios();
|
||||||
|
|
||||||
|
if (!data) return null;
|
||||||
|
|
||||||
|
const { scenarios, startIndex, lastPage, count } = data;
|
||||||
|
|
||||||
|
const nextPage = () => {
|
||||||
|
if (page < lastPage) {
|
||||||
|
setPage(page + 1, "replace");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const prevPage = () => {
|
||||||
|
if (page > 1) {
|
||||||
|
setPage(page - 1, "replace");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const goToLastPage = () => setPage(lastPage, "replace");
|
||||||
|
const goToFirstPage = () => setPage(1, "replace");
|
||||||
|
|
||||||
|
return (
|
||||||
|
<HStack pt={4}>
|
||||||
|
<IconButton
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
onClick={goToFirstPage}
|
||||||
|
isDisabled={page === 1}
|
||||||
|
aria-label="Go to first page"
|
||||||
|
icon={<BsChevronDoubleLeft />}
|
||||||
|
/>
|
||||||
|
<IconButton
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
onClick={prevPage}
|
||||||
|
isDisabled={page === 1}
|
||||||
|
aria-label="Previous page"
|
||||||
|
icon={<BsChevronLeft />}
|
||||||
|
/>
|
||||||
|
<Box>
|
||||||
|
{startIndex}-{startIndex + scenarios.length - 1} / {count}
|
||||||
|
</Box>
|
||||||
|
<IconButton
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
onClick={nextPage}
|
||||||
|
isDisabled={page === lastPage}
|
||||||
|
aria-label="Next page"
|
||||||
|
icon={<BsChevronRight />}
|
||||||
|
/>
|
||||||
|
<IconButton
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
onClick={goToLastPage}
|
||||||
|
isDisabled={page === lastPage}
|
||||||
|
aria-label="Go to last page"
|
||||||
|
icon={<BsChevronDoubleRight />}
|
||||||
|
/>
|
||||||
|
</HStack>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default ScenarioPaginator;
|
||||||
@@ -4,11 +4,13 @@ import { cellPadding } from "../constants";
|
|||||||
import OutputCell from "./OutputCell/OutputCell";
|
import OutputCell from "./OutputCell/OutputCell";
|
||||||
import ScenarioEditor from "./ScenarioEditor";
|
import ScenarioEditor from "./ScenarioEditor";
|
||||||
import type { PromptVariant, Scenario } from "./types";
|
import type { PromptVariant, Scenario } from "./types";
|
||||||
|
import { borders } from "./styles";
|
||||||
|
|
||||||
const ScenarioRow = (props: {
|
const ScenarioRow = (props: {
|
||||||
scenario: Scenario;
|
scenario: Scenario;
|
||||||
variants: PromptVariant[];
|
variants: PromptVariant[];
|
||||||
canHide: boolean;
|
canHide: boolean;
|
||||||
|
rowStart: number;
|
||||||
}) => {
|
}) => {
|
||||||
const [isHovered, setIsHovered] = useState(false);
|
const [isHovered, setIsHovered] = useState(false);
|
||||||
|
|
||||||
@@ -21,15 +23,21 @@ const ScenarioRow = (props: {
|
|||||||
onMouseLeave={() => setIsHovered(false)}
|
onMouseLeave={() => setIsHovered(false)}
|
||||||
sx={isHovered ? highlightStyle : undefined}
|
sx={isHovered ? highlightStyle : undefined}
|
||||||
borderLeftWidth={1}
|
borderLeftWidth={1}
|
||||||
|
{...borders}
|
||||||
|
rowStart={props.rowStart}
|
||||||
|
colStart={1}
|
||||||
>
|
>
|
||||||
<ScenarioEditor scenario={props.scenario} hovered={isHovered} canHide={props.canHide} />
|
<ScenarioEditor scenario={props.scenario} hovered={isHovered} canHide={props.canHide} />
|
||||||
</GridItem>
|
</GridItem>
|
||||||
{props.variants.map((variant) => (
|
{props.variants.map((variant, i) => (
|
||||||
<GridItem
|
<GridItem
|
||||||
key={variant.id}
|
key={variant.id}
|
||||||
onMouseEnter={() => setIsHovered(true)}
|
onMouseEnter={() => setIsHovered(true)}
|
||||||
onMouseLeave={() => setIsHovered(false)}
|
onMouseLeave={() => setIsHovered(false)}
|
||||||
sx={isHovered ? highlightStyle : undefined}
|
sx={isHovered ? highlightStyle : undefined}
|
||||||
|
rowStart={props.rowStart}
|
||||||
|
colStart={i + 2}
|
||||||
|
{...borders}
|
||||||
>
|
>
|
||||||
<Box h="100%" w="100%" px={cellPadding.x} py={cellPadding.y}>
|
<Box h="100%" w="100%" px={cellPadding.x} py={cellPadding.y}>
|
||||||
<OutputCell key={variant.id} scenario={props.scenario} variant={variant} />
|
<OutputCell key={variant.id} scenario={props.scenario} variant={variant} />
|
||||||
|
|||||||
@@ -1,52 +1,82 @@
|
|||||||
import { Button, GridItem, HStack, Heading } from "@chakra-ui/react";
|
import {
|
||||||
|
Button,
|
||||||
|
type ButtonProps,
|
||||||
|
HStack,
|
||||||
|
Text,
|
||||||
|
Icon,
|
||||||
|
Menu,
|
||||||
|
MenuButton,
|
||||||
|
MenuList,
|
||||||
|
MenuItem,
|
||||||
|
IconButton,
|
||||||
|
Spinner,
|
||||||
|
} from "@chakra-ui/react";
|
||||||
import { cellPadding } from "../constants";
|
import { cellPadding } from "../constants";
|
||||||
import { useElementDimensions, useExperimentAccess } from "~/utils/hooks";
|
import {
|
||||||
import { stickyHeaderStyle } from "./styles";
|
useExperiment,
|
||||||
import { BsPencil } from "react-icons/bs";
|
useExperimentAccess,
|
||||||
|
useHandledAsyncCallback,
|
||||||
|
useScenarios,
|
||||||
|
} from "~/utils/hooks";
|
||||||
|
import { BsGear, BsPencil, BsPlus, BsStars } from "react-icons/bs";
|
||||||
import { useAppStore } from "~/state/store";
|
import { useAppStore } from "~/state/store";
|
||||||
|
import { api } from "~/utils/api";
|
||||||
|
|
||||||
export const ScenariosHeader = ({
|
export const ActionButton = (props: ButtonProps) => (
|
||||||
headerRows,
|
<Button size="sm" variant="ghost" color="gray.600" {...props} />
|
||||||
numScenarios,
|
);
|
||||||
}: {
|
|
||||||
headerRows: number;
|
export const ScenariosHeader = () => {
|
||||||
numScenarios: number;
|
|
||||||
}) => {
|
|
||||||
const openDrawer = useAppStore((s) => s.openDrawer);
|
const openDrawer = useAppStore((s) => s.openDrawer);
|
||||||
const { canModify } = useExperimentAccess();
|
const { canModify } = useExperimentAccess();
|
||||||
|
const scenarios = useScenarios();
|
||||||
|
|
||||||
const [ref, dimensions] = useElementDimensions();
|
const experiment = useExperiment();
|
||||||
const topValue = dimensions ? `-${dimensions.height - 24}px` : "-455px";
|
const createScenarioMutation = api.scenarios.create.useMutation();
|
||||||
|
const utils = api.useContext();
|
||||||
|
|
||||||
|
const [onAddScenario, loading] = useHandledAsyncCallback(
|
||||||
|
async (autogenerate: boolean) => {
|
||||||
|
if (!experiment.data) return;
|
||||||
|
await createScenarioMutation.mutateAsync({
|
||||||
|
experimentId: experiment.data.id,
|
||||||
|
autogenerate,
|
||||||
|
});
|
||||||
|
await utils.scenarios.list.invalidate();
|
||||||
|
},
|
||||||
|
[createScenarioMutation],
|
||||||
|
);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<GridItem
|
<HStack w="100%" pb={cellPadding.y} pt={0} align="center" spacing={0}>
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
<Text fontSize={16} fontWeight="bold">
|
||||||
ref={ref as any}
|
Scenarios ({scenarios.data?.count})
|
||||||
display="flex"
|
</Text>
|
||||||
alignItems="flex-end"
|
|
||||||
rowSpan={headerRows}
|
|
||||||
px={cellPadding.x}
|
|
||||||
py={cellPadding.y}
|
|
||||||
// Only display the part of the grid item that has content
|
|
||||||
sx={{ ...stickyHeaderStyle, top: topValue }}
|
|
||||||
>
|
|
||||||
<HStack w="100%">
|
|
||||||
<Heading size="xs" fontWeight="bold" flex={1}>
|
|
||||||
Scenarios ({numScenarios})
|
|
||||||
</Heading>
|
|
||||||
{canModify && (
|
{canModify && (
|
||||||
<Button
|
<Menu>
|
||||||
size="xs"
|
<MenuButton
|
||||||
|
as={IconButton}
|
||||||
|
mt={1}
|
||||||
variant="ghost"
|
variant="ghost"
|
||||||
color="gray.500"
|
aria-label="Edit Scenarios"
|
||||||
aria-label="Edit"
|
icon={<Icon as={loading ? Spinner : BsGear} />}
|
||||||
leftIcon={<BsPencil />}
|
/>
|
||||||
onClick={openDrawer}
|
<MenuList fontSize="md" zIndex="dropdown" mt={-3}>
|
||||||
|
<MenuItem
|
||||||
|
icon={<Icon as={BsPlus} boxSize={6} mx="-5px" />}
|
||||||
|
onClick={() => onAddScenario(false)}
|
||||||
>
|
>
|
||||||
|
Add Scenario
|
||||||
|
</MenuItem>
|
||||||
|
<MenuItem icon={<BsStars />} onClick={() => onAddScenario(true)}>
|
||||||
|
Autogenerate Scenario
|
||||||
|
</MenuItem>
|
||||||
|
<MenuItem icon={<BsPencil />} onClick={openDrawer}>
|
||||||
Edit Vars
|
Edit Vars
|
||||||
</Button>
|
</MenuItem>
|
||||||
|
</MenuList>
|
||||||
|
</Menu>
|
||||||
)}
|
)}
|
||||||
</HStack>
|
</HStack>
|
||||||
</GridItem>
|
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,17 +1,52 @@
|
|||||||
import { Box, Button, HStack, Spinner, Tooltip, useToast, Text } from "@chakra-ui/react";
|
import {
|
||||||
import { useRef, useEffect, useState, useCallback } from "react";
|
Box,
|
||||||
import { useExperimentAccess, useHandledAsyncCallback, useModifierKeyLabel } from "~/utils/hooks";
|
Button,
|
||||||
import { type PromptVariant } from "./types";
|
HStack,
|
||||||
import { api } from "~/utils/api";
|
IconButton,
|
||||||
|
Spinner,
|
||||||
|
Text,
|
||||||
|
Tooltip,
|
||||||
|
useToast,
|
||||||
|
} from "@chakra-ui/react";
|
||||||
|
import { useCallback, useEffect, useRef, useState } from "react";
|
||||||
|
import { FiMaximize, FiMinimize } from "react-icons/fi";
|
||||||
|
import { editorBackground } from "~/state/sharedVariantEditor.slice";
|
||||||
import { useAppStore } from "~/state/store";
|
import { useAppStore } from "~/state/store";
|
||||||
|
import { api } from "~/utils/api";
|
||||||
|
import {
|
||||||
|
useExperimentAccess,
|
||||||
|
useHandledAsyncCallback,
|
||||||
|
useModifierKeyLabel,
|
||||||
|
useVisibleScenarioIds,
|
||||||
|
} from "~/utils/hooks";
|
||||||
|
import { type PromptVariant } from "./types";
|
||||||
|
|
||||||
export default function VariantEditor(props: { variant: PromptVariant }) {
|
export default function VariantEditor(props: { variant: PromptVariant }) {
|
||||||
const { canModify } = useExperimentAccess();
|
const { canModify } = useExperimentAccess();
|
||||||
const monaco = useAppStore.use.sharedVariantEditor.monaco();
|
const monaco = useAppStore.use.sharedVariantEditor.monaco();
|
||||||
const editorRef = useRef<ReturnType<NonNullable<typeof monaco>["editor"]["create"]> | null>(null);
|
const editorRef = useRef<ReturnType<NonNullable<typeof monaco>["editor"]["create"]> | null>(null);
|
||||||
|
const containerRef = useRef<HTMLDivElement | null>(null);
|
||||||
const [editorId] = useState(() => `editor_${Math.random().toString(36).substring(7)}`);
|
const [editorId] = useState(() => `editor_${Math.random().toString(36).substring(7)}`);
|
||||||
const [isChanged, setIsChanged] = useState(false);
|
const [isChanged, setIsChanged] = useState(false);
|
||||||
|
|
||||||
|
const [isFullscreen, setIsFullscreen] = useState(false);
|
||||||
|
|
||||||
|
const toggleFullscreen = useCallback(() => {
|
||||||
|
setIsFullscreen((prev) => !prev);
|
||||||
|
editorRef.current?.focus();
|
||||||
|
}, [setIsFullscreen]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const handleEsc = (event: KeyboardEvent) => {
|
||||||
|
if (event.key === "Escape" && isFullscreen) {
|
||||||
|
toggleFullscreen();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
window.addEventListener("keydown", handleEsc);
|
||||||
|
return () => window.removeEventListener("keydown", handleEsc);
|
||||||
|
}, [isFullscreen, toggleFullscreen]);
|
||||||
|
|
||||||
const lastSavedFn = props.variant.constructFn;
|
const lastSavedFn = props.variant.constructFn;
|
||||||
|
|
||||||
const modifierKey = useModifierKeyLabel();
|
const modifierKey = useModifierKeyLabel();
|
||||||
@@ -33,6 +68,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
|
|||||||
const replaceVariant = api.promptVariants.replaceVariant.useMutation();
|
const replaceVariant = api.promptVariants.replaceVariant.useMutation();
|
||||||
const utils = api.useContext();
|
const utils = api.useContext();
|
||||||
const toast = useToast();
|
const toast = useToast();
|
||||||
|
const visibleScenarios = useVisibleScenarioIds();
|
||||||
|
|
||||||
const [onSave, saveInProgress] = useHandledAsyncCallback(async () => {
|
const [onSave, saveInProgress] = useHandledAsyncCallback(async () => {
|
||||||
if (!editorRef.current) return;
|
if (!editorRef.current) return;
|
||||||
@@ -48,13 +84,11 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
|
|||||||
if (!model) return;
|
if (!model) return;
|
||||||
|
|
||||||
// Make sure the user defined the prompt with the string "prompt\w*=" somewhere
|
// Make sure the user defined the prompt with the string "prompt\w*=" somewhere
|
||||||
const promptRegex = /prompt\s*=/;
|
const promptRegex = /definePrompt\(/;
|
||||||
if (!promptRegex.test(currentFn)) {
|
if (!promptRegex.test(currentFn)) {
|
||||||
console.log("no prompt");
|
|
||||||
console.log(currentFn);
|
|
||||||
toast({
|
toast({
|
||||||
title: "Missing prompt",
|
title: "Missing prompt",
|
||||||
description: "Please define the prompt (eg. `prompt = { ...`).",
|
description: "Please define the prompt (eg. `definePrompt(...`",
|
||||||
status: "error",
|
status: "error",
|
||||||
});
|
});
|
||||||
return;
|
return;
|
||||||
@@ -63,6 +97,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
|
|||||||
const resp = await replaceVariant.mutateAsync({
|
const resp = await replaceVariant.mutateAsync({
|
||||||
id: props.variant.id,
|
id: props.variant.id,
|
||||||
constructFn: currentFn,
|
constructFn: currentFn,
|
||||||
|
streamScenarios: visibleScenarios,
|
||||||
});
|
});
|
||||||
if (resp.status === "error") {
|
if (resp.status === "error") {
|
||||||
return toast({
|
return toast({
|
||||||
@@ -101,11 +136,23 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
|
|||||||
readOnly: !canModify,
|
readOnly: !canModify,
|
||||||
});
|
});
|
||||||
|
|
||||||
editorRef.current.onDidFocusEditorText(() => {
|
// Workaround because otherwise the commands only work on whatever
|
||||||
// Workaround because otherwise the command only works on whatever
|
|
||||||
// editor was loaded on the page last.
|
// editor was loaded on the page last.
|
||||||
// https://github.com/microsoft/monaco-editor/issues/2947#issuecomment-1422265201
|
// https://github.com/microsoft/monaco-editor/issues/2947#issuecomment-1422265201
|
||||||
editorRef.current?.addCommand(monaco.KeyMod.CtrlCmd | monaco.KeyCode.Enter, onSave);
|
editorRef.current.onDidFocusEditorText(() => {
|
||||||
|
editorRef.current?.addCommand(monaco.KeyMod.CtrlCmd | monaco.KeyCode.KeyS, onSave);
|
||||||
|
|
||||||
|
editorRef.current?.addCommand(
|
||||||
|
monaco.KeyMod.CtrlCmd | monaco.KeyMod.Shift | monaco.KeyCode.KeyF,
|
||||||
|
toggleFullscreen,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Exit fullscreen with escape
|
||||||
|
editorRef.current?.addCommand(monaco.KeyCode.Escape, () => {
|
||||||
|
if (isFullscreen) {
|
||||||
|
toggleFullscreen();
|
||||||
|
}
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
editorRef.current.onDidChangeModelContent(checkForChanges);
|
editorRef.current.onDidChangeModelContent(checkForChanges);
|
||||||
@@ -134,8 +181,40 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
|
|||||||
}, [canModify]);
|
}, [canModify]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Box w="100%" pos="relative">
|
<Box
|
||||||
<div id={editorId} style={{ height: "400px", width: "100%" }}></div>
|
w="100%"
|
||||||
|
ref={containerRef}
|
||||||
|
sx={
|
||||||
|
isFullscreen
|
||||||
|
? {
|
||||||
|
position: "fixed",
|
||||||
|
top: 0,
|
||||||
|
left: 0,
|
||||||
|
right: 0,
|
||||||
|
bottom: 0,
|
||||||
|
}
|
||||||
|
: { h: "400px", w: "100%" }
|
||||||
|
}
|
||||||
|
bgColor={editorBackground}
|
||||||
|
zIndex={isFullscreen ? 1000 : "unset"}
|
||||||
|
pos="relative"
|
||||||
|
_hover={{ ".fullscreen-toggle": { opacity: 1 } }}
|
||||||
|
>
|
||||||
|
<Box id={editorId} w="100%" h="100%" />
|
||||||
|
<Tooltip label={`${modifierKey} + ⇧ + F`}>
|
||||||
|
<IconButton
|
||||||
|
className="fullscreen-toggle"
|
||||||
|
aria-label="Minimize"
|
||||||
|
icon={isFullscreen ? <FiMinimize /> : <FiMaximize />}
|
||||||
|
position="absolute"
|
||||||
|
top={2}
|
||||||
|
right={2}
|
||||||
|
onClick={toggleFullscreen}
|
||||||
|
opacity={0}
|
||||||
|
transition="opacity 0.2s"
|
||||||
|
/>
|
||||||
|
</Tooltip>
|
||||||
|
|
||||||
{isChanged && (
|
{isChanged && (
|
||||||
<HStack pos="absolute" bottom={2} right={2}>
|
<HStack pos="absolute" bottom={2} right={2}>
|
||||||
<Button
|
<Button
|
||||||
@@ -148,7 +227,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
|
|||||||
>
|
>
|
||||||
Reset
|
Reset
|
||||||
</Button>
|
</Button>
|
||||||
<Tooltip label={`${modifierKey} + Enter`}>
|
<Tooltip label={`${modifierKey} + S`}>
|
||||||
<Button size="sm" onClick={onSave} colorScheme="blue" w={16} disabled={saveInProgress}>
|
<Button size="sm" onClick={onSave} colorScheme="blue" w={16} disabled={saveInProgress}>
|
||||||
{saveInProgress ? <Spinner boxSize={4} /> : <Text>Save</Text>}
|
{saveInProgress ? <Spinner boxSize={4} /> : <Text>Save</Text>}
|
||||||
</Button>
|
</Button>
|
||||||
|
|||||||
@@ -21,17 +21,14 @@ export default function VariantStats(props: { variant: PromptVariant }) {
|
|||||||
completionTokens: 0,
|
completionTokens: 0,
|
||||||
scenarioCount: 0,
|
scenarioCount: 0,
|
||||||
outputCount: 0,
|
outputCount: 0,
|
||||||
awaitingRetrievals: false,
|
awaitingEvals: false,
|
||||||
},
|
},
|
||||||
refetchInterval,
|
refetchInterval,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
// Poll every two seconds while we are waiting for LLM retrievals to finish
|
// Poll every two seconds while we are waiting for LLM retrievals to finish
|
||||||
useEffect(
|
useEffect(() => setRefetchInterval(data.awaitingEvals ? 5000 : 0), [data.awaitingEvals]);
|
||||||
() => setRefetchInterval(data.awaitingRetrievals ? 2000 : 0),
|
|
||||||
[data.awaitingRetrievals],
|
|
||||||
);
|
|
||||||
|
|
||||||
const [passColor, neutralColor, failColor] = useToken("colors", [
|
const [passColor, neutralColor, failColor] = useToken("colors", [
|
||||||
"green.500",
|
"green.500",
|
||||||
@@ -51,12 +48,12 @@ export default function VariantStats(props: { variant: PromptVariant }) {
|
|||||||
fontSize="xs"
|
fontSize="xs"
|
||||||
py={cellPadding.y}
|
py={cellPadding.y}
|
||||||
>
|
>
|
||||||
|
<HStack px={cellPadding.x}>
|
||||||
{showNumFinished && (
|
{showNumFinished && (
|
||||||
<Text>
|
<Text>
|
||||||
{data.outputCount} / {data.scenarioCount}
|
{data.outputCount} / {data.scenarioCount}
|
||||||
</Text>
|
</Text>
|
||||||
)}
|
)}
|
||||||
<HStack px={cellPadding.x}>
|
|
||||||
{data.evalResults.map((result) => {
|
{data.evalResults.map((result) => {
|
||||||
const passedFrac = result.passCount / result.totalCount;
|
const passedFrac = result.passCount / result.totalCount;
|
||||||
return (
|
return (
|
||||||
@@ -69,7 +66,7 @@ export default function VariantStats(props: { variant: PromptVariant }) {
|
|||||||
);
|
);
|
||||||
})}
|
})}
|
||||||
</HStack>
|
</HStack>
|
||||||
{data.overallCost && !data.awaitingRetrievals && (
|
{data.overallCost && (
|
||||||
<CostTooltip
|
<CostTooltip
|
||||||
promptTokens={data.promptTokens}
|
promptTokens={data.promptTokens}
|
||||||
completionTokens={data.completionTokens}
|
completionTokens={data.completionTokens}
|
||||||
|
|||||||
@@ -1,13 +1,15 @@
|
|||||||
import { Grid, GridItem } from "@chakra-ui/react";
|
import { Grid, GridItem, type GridItemProps } from "@chakra-ui/react";
|
||||||
import { api } from "~/utils/api";
|
import { api } from "~/utils/api";
|
||||||
import NewScenarioButton from "./NewScenarioButton";
|
import AddVariantButton from "./AddVariantButton";
|
||||||
import NewVariantButton from "./NewVariantButton";
|
|
||||||
import ScenarioRow from "./ScenarioRow";
|
import ScenarioRow from "./ScenarioRow";
|
||||||
import VariantEditor from "./VariantEditor";
|
import VariantEditor from "./VariantEditor";
|
||||||
import VariantHeader from "../VariantHeader/VariantHeader";
|
import VariantHeader from "../VariantHeader/VariantHeader";
|
||||||
import VariantStats from "./VariantStats";
|
import VariantStats from "./VariantStats";
|
||||||
import { ScenariosHeader } from "./ScenariosHeader";
|
import { ScenariosHeader } from "./ScenariosHeader";
|
||||||
import { stickyHeaderStyle } from "./styles";
|
import { borders } from "./styles";
|
||||||
|
import { useScenarios } from "~/utils/hooks";
|
||||||
|
import ScenarioPaginator from "./ScenarioPaginator";
|
||||||
|
import { Fragment } from "react";
|
||||||
|
|
||||||
export default function OutputsTable({ experimentId }: { experimentId: string | undefined }) {
|
export default function OutputsTable({ experimentId }: { experimentId: string | undefined }) {
|
||||||
const variants = api.promptVariants.list.useQuery(
|
const variants = api.promptVariants.list.useQuery(
|
||||||
@@ -15,68 +17,91 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
|
|||||||
{ enabled: !!experimentId },
|
{ enabled: !!experimentId },
|
||||||
);
|
);
|
||||||
|
|
||||||
const scenarios = api.scenarios.list.useQuery(
|
const scenarios = useScenarios();
|
||||||
{ experimentId: experimentId as string },
|
|
||||||
{ enabled: !!experimentId },
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!variants.data || !scenarios.data) return null;
|
if (!variants.data || !scenarios.data) return null;
|
||||||
|
|
||||||
const allCols = variants.data.length + 1;
|
const allCols = variants.data.length + 2;
|
||||||
const headerRows = 3;
|
const variantHeaderRows = 3;
|
||||||
|
const scenarioHeaderRows = 1;
|
||||||
|
const scenarioFooterRows = 1;
|
||||||
|
const visibleScenariosCount = scenarios.data.scenarios.length;
|
||||||
|
const allRows =
|
||||||
|
variantHeaderRows + scenarioHeaderRows + visibleScenariosCount + scenarioFooterRows;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Grid
|
<Grid
|
||||||
p={4}
|
pt={4}
|
||||||
pb={24}
|
pb={24}
|
||||||
|
pl={8}
|
||||||
display="grid"
|
display="grid"
|
||||||
gridTemplateColumns={`250px repeat(${variants.data.length}, minmax(300px, 1fr)) auto`}
|
gridTemplateColumns={`250px repeat(${variants.data.length}, minmax(300px, 1fr)) auto`}
|
||||||
sx={{
|
sx={{
|
||||||
"> *": {
|
"> *": {
|
||||||
borderColor: "gray.300",
|
borderColor: "gray.300",
|
||||||
borderBottomWidth: 1,
|
|
||||||
borderRightWidth: 1,
|
|
||||||
},
|
},
|
||||||
}}
|
}}
|
||||||
fontSize="sm"
|
fontSize="sm"
|
||||||
>
|
>
|
||||||
<ScenariosHeader headerRows={headerRows} numScenarios={scenarios.data.length} />
|
<GridItem rowSpan={variantHeaderRows}>
|
||||||
|
<AddVariantButton />
|
||||||
{variants.data.map((variant) => (
|
|
||||||
<VariantHeader key={variant.uiId} variant={variant} canHide={variants.data.length > 1} />
|
|
||||||
))}
|
|
||||||
<GridItem
|
|
||||||
rowSpan={scenarios.data.length + headerRows}
|
|
||||||
padding={0}
|
|
||||||
// Have to use `style` instead of emotion style props to work around css specificity issues conflicting with the "> *" selector on Grid
|
|
||||||
style={{ borderRightWidth: 0, borderBottomWidth: 0 }}
|
|
||||||
h={8}
|
|
||||||
sx={stickyHeaderStyle}
|
|
||||||
>
|
|
||||||
<NewVariantButton />
|
|
||||||
</GridItem>
|
</GridItem>
|
||||||
|
|
||||||
{variants.data.map((variant) => (
|
{variants.data.map((variant, i) => {
|
||||||
<GridItem key={variant.uiId}>
|
const sharedProps: GridItemProps = {
|
||||||
|
...borders,
|
||||||
|
colStart: i + 2,
|
||||||
|
borderLeftWidth: i === 0 ? 1 : 0,
|
||||||
|
marginLeft: i === 0 ? "-1px" : 0,
|
||||||
|
backgroundColor: "gray.100",
|
||||||
|
};
|
||||||
|
return (
|
||||||
|
<Fragment key={variant.uiId}>
|
||||||
|
<VariantHeader
|
||||||
|
variant={variant}
|
||||||
|
canHide={variants.data.length > 1}
|
||||||
|
rowStart={1}
|
||||||
|
{...sharedProps}
|
||||||
|
/>
|
||||||
|
<GridItem rowStart={2} {...sharedProps}>
|
||||||
<VariantEditor variant={variant} />
|
<VariantEditor variant={variant} />
|
||||||
</GridItem>
|
</GridItem>
|
||||||
))}
|
<GridItem rowStart={3} {...sharedProps}>
|
||||||
{variants.data.map((variant) => (
|
|
||||||
<GridItem key={variant.uiId}>
|
|
||||||
<VariantStats variant={variant} />
|
<VariantStats variant={variant} />
|
||||||
</GridItem>
|
</GridItem>
|
||||||
))}
|
</Fragment>
|
||||||
{scenarios.data.map((scenario) => (
|
);
|
||||||
|
})}
|
||||||
|
|
||||||
|
<GridItem
|
||||||
|
colSpan={allCols - 1}
|
||||||
|
rowStart={variantHeaderRows + 1}
|
||||||
|
colStart={1}
|
||||||
|
{...borders}
|
||||||
|
borderRightWidth={0}
|
||||||
|
>
|
||||||
|
<ScenariosHeader />
|
||||||
|
</GridItem>
|
||||||
|
|
||||||
|
{scenarios.data.scenarios.map((scenario, i) => (
|
||||||
<ScenarioRow
|
<ScenarioRow
|
||||||
|
rowStart={i + variantHeaderRows + scenarioHeaderRows + 2}
|
||||||
key={scenario.uiId}
|
key={scenario.uiId}
|
||||||
scenario={scenario}
|
scenario={scenario}
|
||||||
variants={variants.data}
|
variants={variants.data}
|
||||||
canHide={scenarios.data.length > 1}
|
canHide={visibleScenariosCount > 1}
|
||||||
/>
|
/>
|
||||||
))}
|
))}
|
||||||
<GridItem borderBottomWidth={0} borderRightWidth={0} w="100%" colSpan={allCols} padding={0}>
|
<GridItem
|
||||||
<NewScenarioButton />
|
rowStart={variantHeaderRows + scenarioHeaderRows + visibleScenariosCount + 2}
|
||||||
|
colStart={1}
|
||||||
|
colSpan={allCols}
|
||||||
|
>
|
||||||
|
<ScenarioPaginator />
|
||||||
</GridItem>
|
</GridItem>
|
||||||
|
|
||||||
|
{/* Add some extra padding on the right, because when the table is too wide to fit in the viewport `pr` on the Grid isn't respected. */}
|
||||||
|
<GridItem rowStart={1} colStart={allCols} rowSpan={allRows} w={4} borderBottomWidth={0} />
|
||||||
</Grid>
|
</Grid>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,8 +1,6 @@
|
|||||||
import { type SystemStyleObject } from "@chakra-ui/react";
|
import { type GridItemProps } from "@chakra-ui/react";
|
||||||
|
|
||||||
export const stickyHeaderStyle: SystemStyleObject = {
|
export const borders: GridItemProps = {
|
||||||
position: "sticky",
|
borderRightWidth: 1,
|
||||||
top: "0",
|
borderBottomWidth: 1,
|
||||||
backgroundColor: "#fff",
|
|
||||||
zIndex: 1,
|
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -2,4 +2,4 @@ import { type RouterOutputs } from "~/utils/api";
|
|||||||
|
|
||||||
export type PromptVariant = NonNullable<RouterOutputs["promptVariants"]["list"]>[0];
|
export type PromptVariant = NonNullable<RouterOutputs["promptVariants"]["list"]>[0];
|
||||||
|
|
||||||
export type Scenario = NonNullable<RouterOutputs["scenarios"]["list"]>[0];
|
export type Scenario = NonNullable<RouterOutputs["scenarios"]["list"]>["scenarios"][0];
|
||||||
|
|||||||
@@ -1,18 +1,10 @@
|
|||||||
import { HStack, VStack } from "@chakra-ui/react";
|
import { type StackProps, VStack, useBreakpointValue } from "@chakra-ui/react";
|
||||||
import React from "react";
|
import React from "react";
|
||||||
import DiffViewer, { DiffMethod } from "react-diff-viewer";
|
import DiffViewer, { DiffMethod } from "react-diff-viewer";
|
||||||
import Prism from "prismjs";
|
import Prism from "prismjs";
|
||||||
import "prismjs/components/prism-javascript";
|
import "prismjs/components/prism-javascript";
|
||||||
import "prismjs/themes/prism.css"; // choose a theme you like
|
import "prismjs/themes/prism.css"; // choose a theme you like
|
||||||
|
|
||||||
const CompareFunctions = ({
|
|
||||||
originalFunction,
|
|
||||||
newFunction = "",
|
|
||||||
}: {
|
|
||||||
originalFunction: string;
|
|
||||||
newFunction?: string;
|
|
||||||
}) => {
|
|
||||||
console.log("newFunction", newFunction);
|
|
||||||
const highlightSyntax = (str: string) => {
|
const highlightSyntax = (str: string) => {
|
||||||
let highlighted;
|
let highlighted;
|
||||||
try {
|
try {
|
||||||
@@ -23,22 +15,44 @@ const CompareFunctions = ({
|
|||||||
}
|
}
|
||||||
return <pre style={{ display: "inline" }} dangerouslySetInnerHTML={{ __html: highlighted }} />;
|
return <pre style={{ display: "inline" }} dangerouslySetInnerHTML={{ __html: highlighted }} />;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const CompareFunctions = ({
|
||||||
|
originalFunction,
|
||||||
|
newFunction = "",
|
||||||
|
leftTitle = "Original",
|
||||||
|
rightTitle = "Modified",
|
||||||
|
...props
|
||||||
|
}: {
|
||||||
|
originalFunction: string;
|
||||||
|
newFunction?: string;
|
||||||
|
leftTitle?: string;
|
||||||
|
rightTitle?: string;
|
||||||
|
} & StackProps) => {
|
||||||
|
const showSplitView = useBreakpointValue(
|
||||||
|
{
|
||||||
|
base: false,
|
||||||
|
md: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
fallback: "base",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<HStack w="full" spacing={5}>
|
<VStack w="full" spacing={4} fontSize={12} lineHeight={1} overflowY="auto" {...props}>
|
||||||
<VStack w="full" spacing={4} maxH="65vh" fontSize={12} lineHeight={1} overflowY="auto">
|
|
||||||
<DiffViewer
|
<DiffViewer
|
||||||
oldValue={originalFunction}
|
oldValue={originalFunction}
|
||||||
newValue={newFunction || originalFunction}
|
newValue={newFunction || originalFunction}
|
||||||
splitView={true}
|
splitView={showSplitView}
|
||||||
hideLineNumbers={true}
|
hideLineNumbers={!showSplitView}
|
||||||
leftTitle="Original"
|
leftTitle={leftTitle}
|
||||||
rightTitle={newFunction ? "Modified" : "Unmodified"}
|
rightTitle={rightTitle}
|
||||||
disableWordDiff={true}
|
disableWordDiff={true}
|
||||||
compareMethod={DiffMethod.CHARS}
|
compareMethod={DiffMethod.CHARS}
|
||||||
renderContent={highlightSyntax}
|
renderContent={highlightSyntax}
|
||||||
|
showDiffOnly={false}
|
||||||
/>
|
/>
|
||||||
</VStack>
|
</VStack>
|
||||||
</HStack>
|
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
74
src/components/RefinePromptModal/CustomInstructionsInput.tsx
Normal file
74
src/components/RefinePromptModal/CustomInstructionsInput.tsx
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
import { Button, Spinner, InputGroup, InputRightElement, Icon, HStack } from "@chakra-ui/react";
|
||||||
|
import { IoMdSend } from "react-icons/io";
|
||||||
|
import AutoResizeTextArea from "../AutoResizeTextArea";
|
||||||
|
|
||||||
|
export const CustomInstructionsInput = ({
|
||||||
|
instructions,
|
||||||
|
setInstructions,
|
||||||
|
loading,
|
||||||
|
onSubmit,
|
||||||
|
}: {
|
||||||
|
instructions: string;
|
||||||
|
setInstructions: (instructions: string) => void;
|
||||||
|
loading: boolean;
|
||||||
|
onSubmit: () => void;
|
||||||
|
}) => {
|
||||||
|
return (
|
||||||
|
<InputGroup
|
||||||
|
size="md"
|
||||||
|
w="full"
|
||||||
|
maxW="600"
|
||||||
|
boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);"
|
||||||
|
borderRadius={8}
|
||||||
|
alignItems="center"
|
||||||
|
colorScheme="orange"
|
||||||
|
>
|
||||||
|
<AutoResizeTextArea
|
||||||
|
value={instructions}
|
||||||
|
onChange={(e) => setInstructions(e.target.value)}
|
||||||
|
onKeyDown={(e) => {
|
||||||
|
if (e.key === "Enter" && !e.metaKey && !e.ctrlKey && !e.shiftKey) {
|
||||||
|
e.preventDefault();
|
||||||
|
e.currentTarget.blur();
|
||||||
|
onSubmit();
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
placeholder="Send custom instructions"
|
||||||
|
py={4}
|
||||||
|
pl={4}
|
||||||
|
pr={12}
|
||||||
|
colorScheme="orange"
|
||||||
|
borderColor="gray.300"
|
||||||
|
borderWidth={1}
|
||||||
|
_hover={{
|
||||||
|
borderColor: "gray.300",
|
||||||
|
}}
|
||||||
|
_focus={{
|
||||||
|
borderColor: "gray.300",
|
||||||
|
}}
|
||||||
|
isDisabled={loading}
|
||||||
|
/>
|
||||||
|
<HStack></HStack>
|
||||||
|
<InputRightElement width="8" height="full">
|
||||||
|
<Button
|
||||||
|
h="8"
|
||||||
|
w="8"
|
||||||
|
minW="unset"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => onSubmit()}
|
||||||
|
variant={instructions ? "solid" : "ghost"}
|
||||||
|
mr={4}
|
||||||
|
borderRadius="8"
|
||||||
|
bgColor={instructions ? "orange.400" : "transparent"}
|
||||||
|
colorScheme="orange"
|
||||||
|
>
|
||||||
|
{loading ? (
|
||||||
|
<Spinner boxSize={4} />
|
||||||
|
) : (
|
||||||
|
<Icon as={IoMdSend} color={instructions ? "white" : "gray.500"} boxSize={5} />
|
||||||
|
)}
|
||||||
|
</Button>
|
||||||
|
</InputRightElement>
|
||||||
|
</InputGroup>
|
||||||
|
);
|
||||||
|
};
|
||||||
65
src/components/RefinePromptModal/RefineAction.tsx
Normal file
65
src/components/RefinePromptModal/RefineAction.tsx
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
import { HStack, Icon, Heading, Text, VStack, GridItem } from "@chakra-ui/react";
|
||||||
|
import { type IconType } from "react-icons";
|
||||||
|
import { BsStars } from "react-icons/bs";
|
||||||
|
|
||||||
|
export const RefineAction = ({
|
||||||
|
label,
|
||||||
|
icon,
|
||||||
|
desciption,
|
||||||
|
activeLabel,
|
||||||
|
onClick,
|
||||||
|
loading,
|
||||||
|
}: {
|
||||||
|
label: string;
|
||||||
|
icon?: IconType;
|
||||||
|
desciption: string;
|
||||||
|
activeLabel: string | undefined;
|
||||||
|
onClick: (label: string) => void;
|
||||||
|
loading: boolean;
|
||||||
|
}) => {
|
||||||
|
const isActive = activeLabel === label;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<GridItem w="80" h="44">
|
||||||
|
<VStack
|
||||||
|
w="full"
|
||||||
|
h="full"
|
||||||
|
onClick={() => {
|
||||||
|
!loading && onClick(label);
|
||||||
|
}}
|
||||||
|
borderColor={isActive ? "blue.500" : "gray.200"}
|
||||||
|
borderWidth={2}
|
||||||
|
borderRadius={16}
|
||||||
|
padding={6}
|
||||||
|
backgroundColor="gray.50"
|
||||||
|
_hover={
|
||||||
|
loading
|
||||||
|
? undefined
|
||||||
|
: {
|
||||||
|
backgroundColor: "gray.100",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
spacing={8}
|
||||||
|
boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);"
|
||||||
|
cursor="pointer"
|
||||||
|
opacity={loading ? 0.5 : 1}
|
||||||
|
>
|
||||||
|
<HStack cursor="pointer" spacing={6} fontSize="sm" fontWeight="medium" color="gray.500">
|
||||||
|
<Icon as={icon || BsStars} boxSize={12} />
|
||||||
|
<Heading size="md" fontFamily="inconsolata, monospace">
|
||||||
|
{label}
|
||||||
|
</Heading>
|
||||||
|
</HStack>
|
||||||
|
<Text
|
||||||
|
fontSize="sm"
|
||||||
|
color="gray.500"
|
||||||
|
flexWrap="wrap"
|
||||||
|
wordBreak="break-word"
|
||||||
|
overflowWrap="break-word"
|
||||||
|
>
|
||||||
|
{desciption}
|
||||||
|
</Text>
|
||||||
|
</VStack>
|
||||||
|
</GridItem>
|
||||||
|
);
|
||||||
|
};
|
||||||
@@ -11,13 +11,20 @@ import {
|
|||||||
Text,
|
Text,
|
||||||
Spinner,
|
Spinner,
|
||||||
HStack,
|
HStack,
|
||||||
|
Icon,
|
||||||
|
SimpleGrid,
|
||||||
} from "@chakra-ui/react";
|
} from "@chakra-ui/react";
|
||||||
|
import { BsStars } from "react-icons/bs";
|
||||||
import { api } from "~/utils/api";
|
import { api } from "~/utils/api";
|
||||||
import { useHandledAsyncCallback } from "~/utils/hooks";
|
import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
|
||||||
import { type PromptVariant } from "@prisma/client";
|
import { type PromptVariant } from "@prisma/client";
|
||||||
import { useState } from "react";
|
import { useState } from "react";
|
||||||
import AutoResizeTextArea from "../AutoResizeTextArea";
|
|
||||||
import CompareFunctions from "./CompareFunctions";
|
import CompareFunctions from "./CompareFunctions";
|
||||||
|
import { CustomInstructionsInput } from "./CustomInstructionsInput";
|
||||||
|
import { RefineAction } from "./RefineAction";
|
||||||
|
import { isObject, isString } from "lodash-es";
|
||||||
|
import { type RefinementAction, type SupportedProvider } from "~/modelProviders/types";
|
||||||
|
import frontendModelProviders from "~/modelProviders/frontendModelProviders";
|
||||||
|
|
||||||
export const RefinePromptModal = ({
|
export const RefinePromptModal = ({
|
||||||
variant,
|
variant,
|
||||||
@@ -27,71 +34,112 @@ export const RefinePromptModal = ({
|
|||||||
onClose: () => void;
|
onClose: () => void;
|
||||||
}) => {
|
}) => {
|
||||||
const utils = api.useContext();
|
const utils = api.useContext();
|
||||||
|
const visibleScenarios = useVisibleScenarioIds();
|
||||||
|
|
||||||
const { mutateAsync: getRefinedPromptMutateAsync, data: refinedPromptFn } =
|
const refinementActions =
|
||||||
api.promptVariants.getRefinedPromptFn.useMutation();
|
frontendModelProviders[variant.modelProvider as SupportedProvider].refinementActions || {};
|
||||||
|
|
||||||
|
const { mutateAsync: getModifiedPromptMutateAsync, data: refinedPromptFn } =
|
||||||
|
api.promptVariants.getModifiedPromptFn.useMutation();
|
||||||
const [instructions, setInstructions] = useState<string>("");
|
const [instructions, setInstructions] = useState<string>("");
|
||||||
|
|
||||||
const [getRefinedPromptFn, refiningInProgress] = useHandledAsyncCallback(async () => {
|
const [activeRefineActionLabel, setActiveRefineActionLabel] = useState<string | undefined>(
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
|
|
||||||
|
const [getModifiedPromptFn, modificationInProgress] = useHandledAsyncCallback(
|
||||||
|
async (label?: string) => {
|
||||||
if (!variant.experimentId) return;
|
if (!variant.experimentId) return;
|
||||||
await getRefinedPromptMutateAsync({
|
const updatedInstructions = label
|
||||||
|
? (refinementActions[label] as RefinementAction).instructions
|
||||||
|
: instructions;
|
||||||
|
setActiveRefineActionLabel(label);
|
||||||
|
await getModifiedPromptMutateAsync({
|
||||||
id: variant.id,
|
id: variant.id,
|
||||||
instructions,
|
instructions: updatedInstructions,
|
||||||
});
|
});
|
||||||
}, [getRefinedPromptMutateAsync, onClose, variant, instructions]);
|
},
|
||||||
|
[getModifiedPromptMutateAsync, onClose, variant, instructions, setActiveRefineActionLabel],
|
||||||
|
);
|
||||||
|
|
||||||
const replaceVariantMutation = api.promptVariants.replaceVariant.useMutation();
|
const replaceVariantMutation = api.promptVariants.replaceVariant.useMutation();
|
||||||
|
|
||||||
const [replaceVariant, replacementInProgress] = useHandledAsyncCallback(async () => {
|
const [replaceVariant, replacementInProgress] = useHandledAsyncCallback(async () => {
|
||||||
if (!variant.experimentId || !refinedPromptFn) return;
|
if (
|
||||||
|
!variant.experimentId ||
|
||||||
|
!refinedPromptFn ||
|
||||||
|
(isObject(refinedPromptFn) && "status" in refinedPromptFn)
|
||||||
|
)
|
||||||
|
return;
|
||||||
await replaceVariantMutation.mutateAsync({
|
await replaceVariantMutation.mutateAsync({
|
||||||
id: variant.id,
|
id: variant.id,
|
||||||
constructFn: refinedPromptFn,
|
constructFn: refinedPromptFn,
|
||||||
|
streamScenarios: visibleScenarios,
|
||||||
});
|
});
|
||||||
await utils.promptVariants.list.invalidate();
|
await utils.promptVariants.list.invalidate();
|
||||||
onClose();
|
onClose();
|
||||||
}, [replaceVariantMutation, variant, onClose, refinedPromptFn]);
|
}, [replaceVariantMutation, variant, onClose, refinedPromptFn]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Modal isOpen onClose={onClose} size={{ base: "xl", sm: "2xl", md: "7xl" }}>
|
<Modal
|
||||||
|
isOpen
|
||||||
|
onClose={onClose}
|
||||||
|
size={{ base: "xl", sm: "2xl", md: "3xl", lg: "5xl", xl: "7xl" }}
|
||||||
|
>
|
||||||
<ModalOverlay />
|
<ModalOverlay />
|
||||||
<ModalContent w={1200}>
|
<ModalContent w={1200}>
|
||||||
<ModalHeader>Refine Your Prompt</ModalHeader>
|
<ModalHeader>
|
||||||
|
<HStack>
|
||||||
|
<Icon as={BsStars} />
|
||||||
|
<Text>Refine with GPT-4</Text>
|
||||||
|
</HStack>
|
||||||
|
</ModalHeader>
|
||||||
<ModalCloseButton />
|
<ModalCloseButton />
|
||||||
<ModalBody maxW="unset">
|
<ModalBody maxW="unset">
|
||||||
<VStack spacing={8}>
|
<VStack spacing={8}>
|
||||||
<HStack w="full">
|
<VStack spacing={4}>
|
||||||
<AutoResizeTextArea
|
{Object.keys(refinementActions).length && (
|
||||||
value={instructions}
|
<>
|
||||||
onChange={(e) => setInstructions(e.target.value)}
|
<SimpleGrid columns={{ base: 1, md: 2 }} spacing={8}>
|
||||||
onKeyDown={(e) => {
|
{Object.keys(refinementActions).map((label) => (
|
||||||
if (e.key === "Enter" && !e.metaKey && !e.ctrlKey && !e.shiftKey) {
|
<RefineAction
|
||||||
e.preventDefault();
|
key={label}
|
||||||
e.currentTarget.blur();
|
label={label}
|
||||||
getRefinedPromptFn();
|
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||||
}
|
icon={refinementActions[label]!.icon}
|
||||||
}}
|
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||||
placeholder="Use chain of thought"
|
desciption={refinementActions[label]!.description}
|
||||||
|
activeLabel={activeRefineActionLabel}
|
||||||
|
onClick={getModifiedPromptFn}
|
||||||
|
loading={modificationInProgress}
|
||||||
/>
|
/>
|
||||||
<Button onClick={getRefinedPromptFn}>
|
))}
|
||||||
{refiningInProgress ? <Spinner boxSize={4} /> : <Text>Submit</Text>}
|
</SimpleGrid>
|
||||||
</Button>
|
<Text color="gray.500">or</Text>
|
||||||
</HStack>
|
</>
|
||||||
|
)}
|
||||||
|
<CustomInstructionsInput
|
||||||
|
instructions={instructions}
|
||||||
|
setInstructions={setInstructions}
|
||||||
|
loading={modificationInProgress}
|
||||||
|
onSubmit={getModifiedPromptFn}
|
||||||
|
/>
|
||||||
|
</VStack>
|
||||||
<CompareFunctions
|
<CompareFunctions
|
||||||
originalFunction={variant.constructFn}
|
originalFunction={variant.constructFn}
|
||||||
newFunction={refinedPromptFn}
|
newFunction={isString(refinedPromptFn) ? refinedPromptFn : undefined}
|
||||||
|
maxH="40vh"
|
||||||
/>
|
/>
|
||||||
</VStack>
|
</VStack>
|
||||||
</ModalBody>
|
</ModalBody>
|
||||||
|
|
||||||
<ModalFooter>
|
<ModalFooter>
|
||||||
<HStack spacing={4}>
|
<HStack spacing={4}>
|
||||||
<Button onClick={onClose}>Cancel</Button>
|
|
||||||
<Button
|
<Button
|
||||||
colorScheme="blue"
|
colorScheme="blue"
|
||||||
onClick={replaceVariant}
|
onClick={replaceVariant}
|
||||||
minW={24}
|
minW={24}
|
||||||
disabled={!refinedPromptFn}
|
isDisabled={replacementInProgress || !refinedPromptFn}
|
||||||
>
|
>
|
||||||
{replacementInProgress ? <Spinner boxSize={4} /> : <Text>Accept</Text>}
|
{replacementInProgress ? <Spinner boxSize={4} /> : <Text>Accept</Text>}
|
||||||
</Button>
|
</Button>
|
||||||
|
|||||||
@@ -1,89 +0,0 @@
|
|||||||
import {
|
|
||||||
VStack,
|
|
||||||
Text,
|
|
||||||
HStack,
|
|
||||||
type StackProps,
|
|
||||||
GridItem,
|
|
||||||
SimpleGrid,
|
|
||||||
Link,
|
|
||||||
} from "@chakra-ui/react";
|
|
||||||
import { modelStats } from "~/server/modelStats";
|
|
||||||
import { type SupportedModel } from "~/server/types";
|
|
||||||
|
|
||||||
export const ModelStatsCard = ({ label, model }: { label: string; model: SupportedModel }) => {
|
|
||||||
const stats = modelStats[model];
|
|
||||||
return (
|
|
||||||
<VStack w="full" align="start">
|
|
||||||
<Text fontWeight="bold" fontSize="sm" textTransform="uppercase">
|
|
||||||
{label}
|
|
||||||
</Text>
|
|
||||||
|
|
||||||
<VStack w="full" spacing={6} bgColor="gray.100" p={4} borderRadius={4}>
|
|
||||||
<HStack w="full" align="flex-start">
|
|
||||||
<Text flex={1} fontSize="lg">
|
|
||||||
<Text as="span" color="gray.600">
|
|
||||||
{stats.provider} /{" "}
|
|
||||||
</Text>
|
|
||||||
<Text as="span" fontWeight="bold" color="gray.900">
|
|
||||||
{model}
|
|
||||||
</Text>
|
|
||||||
</Text>
|
|
||||||
<Link
|
|
||||||
href={stats.learnMoreUrl}
|
|
||||||
isExternal
|
|
||||||
color="blue.500"
|
|
||||||
fontWeight="bold"
|
|
||||||
fontSize="sm"
|
|
||||||
ml={2}
|
|
||||||
>
|
|
||||||
Learn More
|
|
||||||
</Link>
|
|
||||||
</HStack>
|
|
||||||
<SimpleGrid
|
|
||||||
w="full"
|
|
||||||
justifyContent="space-between"
|
|
||||||
alignItems="flex-start"
|
|
||||||
fontSize="sm"
|
|
||||||
columns={{ base: 2, md: 4 }}
|
|
||||||
>
|
|
||||||
<SelectedModelLabeledInfo label="Context" info={stats.contextLength} />
|
|
||||||
<SelectedModelLabeledInfo
|
|
||||||
label="Input"
|
|
||||||
info={
|
|
||||||
<Text>
|
|
||||||
${(stats.promptTokenPrice * 1000).toFixed(3)}
|
|
||||||
<Text color="gray.500"> / 1K tokens</Text>
|
|
||||||
</Text>
|
|
||||||
}
|
|
||||||
/>
|
|
||||||
<SelectedModelLabeledInfo
|
|
||||||
label="Output"
|
|
||||||
info={
|
|
||||||
<Text>
|
|
||||||
${(stats.promptTokenPrice * 1000).toFixed(3)}
|
|
||||||
<Text color="gray.500"> / 1K tokens</Text>
|
|
||||||
</Text>
|
|
||||||
}
|
|
||||||
/>
|
|
||||||
<SelectedModelLabeledInfo label="Speed" info={<Text>{stats.speed}</Text>} />
|
|
||||||
</SimpleGrid>
|
|
||||||
</VStack>
|
|
||||||
</VStack>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
const SelectedModelLabeledInfo = ({
|
|
||||||
label,
|
|
||||||
info,
|
|
||||||
...props
|
|
||||||
}: {
|
|
||||||
label: string;
|
|
||||||
info: string | number | React.ReactElement;
|
|
||||||
} & StackProps) => (
|
|
||||||
<GridItem>
|
|
||||||
<VStack alignItems="flex-start" {...props}>
|
|
||||||
<Text fontWeight="bold">{label}</Text>
|
|
||||||
<Text>{info}</Text>
|
|
||||||
</VStack>
|
|
||||||
</GridItem>
|
|
||||||
);
|
|
||||||
@@ -1,77 +0,0 @@
|
|||||||
import {
|
|
||||||
Button,
|
|
||||||
Modal,
|
|
||||||
ModalBody,
|
|
||||||
ModalCloseButton,
|
|
||||||
ModalContent,
|
|
||||||
ModalFooter,
|
|
||||||
ModalHeader,
|
|
||||||
ModalOverlay,
|
|
||||||
VStack,
|
|
||||||
Text,
|
|
||||||
Spinner,
|
|
||||||
} from "@chakra-ui/react";
|
|
||||||
import { useState } from "react";
|
|
||||||
import { type SupportedModel } from "~/server/types";
|
|
||||||
import { ModelStatsCard } from "./ModelStatsCard";
|
|
||||||
import { SelectModelSearch } from "./SelectModelSearch";
|
|
||||||
import { api } from "~/utils/api";
|
|
||||||
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
|
|
||||||
|
|
||||||
export const SelectModelModal = ({
|
|
||||||
originalModel,
|
|
||||||
variantId,
|
|
||||||
onClose,
|
|
||||||
}: {
|
|
||||||
originalModel: SupportedModel;
|
|
||||||
variantId: string;
|
|
||||||
onClose: () => void;
|
|
||||||
}) => {
|
|
||||||
const [selectedModel, setSelectedModel] = useState<SupportedModel>(originalModel);
|
|
||||||
const utils = api.useContext();
|
|
||||||
|
|
||||||
const experiment = useExperiment();
|
|
||||||
|
|
||||||
const createMutation = api.promptVariants.create.useMutation();
|
|
||||||
|
|
||||||
const [createNewVariant, creationInProgress] = useHandledAsyncCallback(async () => {
|
|
||||||
if (!experiment?.data?.id) return;
|
|
||||||
await createMutation.mutateAsync({
|
|
||||||
experimentId: experiment?.data?.id,
|
|
||||||
variantId,
|
|
||||||
newModel: selectedModel,
|
|
||||||
});
|
|
||||||
await utils.promptVariants.list.invalidate();
|
|
||||||
onClose();
|
|
||||||
}, [createMutation, experiment?.data?.id, variantId, onClose]);
|
|
||||||
|
|
||||||
return (
|
|
||||||
<Modal isOpen onClose={onClose} size={{ base: "xl", sm: "2xl", md: "3xl" }}>
|
|
||||||
<ModalOverlay />
|
|
||||||
<ModalContent w={1200}>
|
|
||||||
<ModalHeader>Select a New Model</ModalHeader>
|
|
||||||
<ModalCloseButton />
|
|
||||||
<ModalBody maxW="unset">
|
|
||||||
<VStack spacing={8}>
|
|
||||||
<ModelStatsCard label="Original Model" model={originalModel} />
|
|
||||||
{originalModel !== selectedModel && (
|
|
||||||
<ModelStatsCard label="New Model" model={selectedModel} />
|
|
||||||
)}
|
|
||||||
<SelectModelSearch selectedModel={selectedModel} setSelectedModel={setSelectedModel} />
|
|
||||||
</VStack>
|
|
||||||
</ModalBody>
|
|
||||||
|
|
||||||
<ModalFooter>
|
|
||||||
<Button
|
|
||||||
colorScheme="blue"
|
|
||||||
onClick={createNewVariant}
|
|
||||||
minW={24}
|
|
||||||
disabled={originalModel === selectedModel}
|
|
||||||
>
|
|
||||||
{creationInProgress ? <Spinner boxSize={4} /> : <Text>Continue</Text>}
|
|
||||||
</Button>
|
|
||||||
</ModalFooter>
|
|
||||||
</ModalContent>
|
|
||||||
</Modal>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
@@ -1,47 +0,0 @@
|
|||||||
import { VStack, Text } from "@chakra-ui/react";
|
|
||||||
import { type LegacyRef, useCallback } from "react";
|
|
||||||
import Select, { type SingleValue } from "react-select";
|
|
||||||
import { type SupportedModel } from "~/server/types";
|
|
||||||
import { useElementDimensions } from "~/utils/hooks";
|
|
||||||
|
|
||||||
const modelOptions: { value: SupportedModel; label: string }[] = [
|
|
||||||
{ value: "gpt-3.5-turbo", label: "gpt-3.5-turbo" },
|
|
||||||
{ value: "gpt-3.5-turbo-0613", label: "gpt-3.5-turbo-0613" },
|
|
||||||
{ value: "gpt-3.5-turbo-16k", label: "gpt-3.5-turbo-16k" },
|
|
||||||
{ value: "gpt-3.5-turbo-16k-0613", label: "gpt-3.5-turbo-16k-0613" },
|
|
||||||
{ value: "gpt-4", label: "gpt-4" },
|
|
||||||
{ value: "gpt-4-0613", label: "gpt-4-0613" },
|
|
||||||
{ value: "gpt-4-32k", label: "gpt-4-32k" },
|
|
||||||
{ value: "gpt-4-32k-0613", label: "gpt-4-32k-0613" },
|
|
||||||
];
|
|
||||||
|
|
||||||
export const SelectModelSearch = ({
|
|
||||||
selectedModel,
|
|
||||||
setSelectedModel,
|
|
||||||
}: {
|
|
||||||
selectedModel: SupportedModel;
|
|
||||||
setSelectedModel: (model: SupportedModel) => void;
|
|
||||||
}) => {
|
|
||||||
const handleSelection = useCallback(
|
|
||||||
(option: SingleValue<{ value: SupportedModel; label: string }>) => {
|
|
||||||
if (!option) return;
|
|
||||||
setSelectedModel(option.value);
|
|
||||||
},
|
|
||||||
[setSelectedModel],
|
|
||||||
);
|
|
||||||
const selectedOption = modelOptions.find((option) => option.value === selectedModel);
|
|
||||||
|
|
||||||
const [containerRef, containerDimensions] = useElementDimensions();
|
|
||||||
|
|
||||||
return (
|
|
||||||
<VStack ref={containerRef as LegacyRef<HTMLDivElement>} w="full">
|
|
||||||
<Text>Browse Models</Text>
|
|
||||||
<Select
|
|
||||||
styles={{ control: (provided) => ({ ...provided, width: containerDimensions?.width }) }}
|
|
||||||
value={selectedOption}
|
|
||||||
options={modelOptions}
|
|
||||||
onChange={handleSelection}
|
|
||||||
/>
|
|
||||||
</VStack>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
@@ -3,28 +3,33 @@ import { type PromptVariant } from "../OutputsTable/types";
|
|||||||
import { api } from "~/utils/api";
|
import { api } from "~/utils/api";
|
||||||
import { RiDraggable } from "react-icons/ri";
|
import { RiDraggable } from "react-icons/ri";
|
||||||
import { useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
|
import { useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
|
||||||
import { HStack, Icon, Text, GridItem } from "@chakra-ui/react"; // Changed here
|
import { HStack, Icon, Text, GridItem, type GridItemProps } from "@chakra-ui/react"; // Changed here
|
||||||
import { cellPadding, headerMinHeight } from "../constants";
|
import { cellPadding, headerMinHeight } from "../constants";
|
||||||
import AutoResizeTextArea from "../AutoResizeTextArea";
|
import AutoResizeTextArea from "../AutoResizeTextArea";
|
||||||
import { stickyHeaderStyle } from "../OutputsTable/styles";
|
|
||||||
import VariantHeaderMenuButton from "./VariantHeaderMenuButton";
|
import VariantHeaderMenuButton from "./VariantHeaderMenuButton";
|
||||||
|
|
||||||
export default function VariantHeader(props: { variant: PromptVariant; canHide: boolean }) {
|
export default function VariantHeader(
|
||||||
|
allProps: {
|
||||||
|
variant: PromptVariant;
|
||||||
|
canHide: boolean;
|
||||||
|
} & GridItemProps,
|
||||||
|
) {
|
||||||
|
const { variant, canHide, ...gridItemProps } = allProps;
|
||||||
const { canModify } = useExperimentAccess();
|
const { canModify } = useExperimentAccess();
|
||||||
const utils = api.useContext();
|
const utils = api.useContext();
|
||||||
const [isDragTarget, setIsDragTarget] = useState(false);
|
const [isDragTarget, setIsDragTarget] = useState(false);
|
||||||
const [isInputHovered, setIsInputHovered] = useState(false);
|
const [isInputHovered, setIsInputHovered] = useState(false);
|
||||||
const [label, setLabel] = useState(props.variant.label);
|
const [label, setLabel] = useState(variant.label);
|
||||||
|
|
||||||
const updateMutation = api.promptVariants.update.useMutation();
|
const updateMutation = api.promptVariants.update.useMutation();
|
||||||
const [onSaveLabel] = useHandledAsyncCallback(async () => {
|
const [onSaveLabel] = useHandledAsyncCallback(async () => {
|
||||||
if (label && label !== props.variant.label) {
|
if (label && label !== variant.label) {
|
||||||
await updateMutation.mutateAsync({
|
await updateMutation.mutateAsync({
|
||||||
id: props.variant.id,
|
id: variant.id,
|
||||||
updates: { label: label },
|
updates: { label: label },
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}, [updateMutation, props.variant.id, props.variant.label, label]);
|
}, [updateMutation, variant.id, variant.label, label]);
|
||||||
|
|
||||||
const reorderMutation = api.promptVariants.reorder.useMutation();
|
const reorderMutation = api.promptVariants.reorder.useMutation();
|
||||||
const [onReorder] = useHandledAsyncCallback(
|
const [onReorder] = useHandledAsyncCallback(
|
||||||
@@ -32,7 +37,7 @@ export default function VariantHeader(props: { variant: PromptVariant; canHide:
|
|||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
setIsDragTarget(false);
|
setIsDragTarget(false);
|
||||||
const draggedId = e.dataTransfer.getData("text/plain");
|
const draggedId = e.dataTransfer.getData("text/plain");
|
||||||
const droppedId = props.variant.id;
|
const droppedId = variant.id;
|
||||||
if (!draggedId || !droppedId || draggedId === droppedId) return;
|
if (!draggedId || !droppedId || draggedId === droppedId) return;
|
||||||
await reorderMutation.mutateAsync({
|
await reorderMutation.mutateAsync({
|
||||||
draggedId,
|
draggedId,
|
||||||
@@ -40,16 +45,26 @@ export default function VariantHeader(props: { variant: PromptVariant; canHide:
|
|||||||
});
|
});
|
||||||
await utils.promptVariants.list.invalidate();
|
await utils.promptVariants.list.invalidate();
|
||||||
},
|
},
|
||||||
[reorderMutation, props.variant.id],
|
[reorderMutation, variant.id],
|
||||||
);
|
);
|
||||||
|
|
||||||
const [menuOpen, setMenuOpen] = useState(false);
|
const [menuOpen, setMenuOpen] = useState(false);
|
||||||
|
|
||||||
if (!canModify) {
|
if (!canModify) {
|
||||||
return (
|
return (
|
||||||
<GridItem padding={0} sx={stickyHeaderStyle} borderTopWidth={1}>
|
<GridItem
|
||||||
|
padding={0}
|
||||||
|
sx={{
|
||||||
|
position: "sticky",
|
||||||
|
top: "0",
|
||||||
|
// Ensure that the menu always appears above the sticky header of other variants
|
||||||
|
zIndex: menuOpen ? "dropdown" : 10,
|
||||||
|
}}
|
||||||
|
borderTopWidth={1}
|
||||||
|
{...gridItemProps}
|
||||||
|
>
|
||||||
<Text fontSize={16} fontWeight="bold" px={cellPadding.x} py={cellPadding.y}>
|
<Text fontSize={16} fontWeight="bold" px={cellPadding.x} py={cellPadding.y}>
|
||||||
{props.variant.label}
|
{variant.label}
|
||||||
</Text>
|
</Text>
|
||||||
</GridItem>
|
</GridItem>
|
||||||
);
|
);
|
||||||
@@ -59,19 +74,21 @@ export default function VariantHeader(props: { variant: PromptVariant; canHide:
|
|||||||
<GridItem
|
<GridItem
|
||||||
padding={0}
|
padding={0}
|
||||||
sx={{
|
sx={{
|
||||||
...stickyHeaderStyle,
|
position: "sticky",
|
||||||
|
top: "0",
|
||||||
// Ensure that the menu always appears above the sticky header of other variants
|
// Ensure that the menu always appears above the sticky header of other variants
|
||||||
zIndex: menuOpen ? "dropdown" : stickyHeaderStyle.zIndex,
|
zIndex: menuOpen ? "dropdown" : 10,
|
||||||
}}
|
}}
|
||||||
borderTopWidth={1}
|
borderTopWidth={1}
|
||||||
|
{...gridItemProps}
|
||||||
>
|
>
|
||||||
<HStack
|
<HStack
|
||||||
spacing={4}
|
spacing={2}
|
||||||
alignItems="flex-start"
|
alignItems="flex-start"
|
||||||
minH={headerMinHeight}
|
minH={headerMinHeight}
|
||||||
draggable={!isInputHovered}
|
draggable={!isInputHovered}
|
||||||
onDragStart={(e) => {
|
onDragStart={(e) => {
|
||||||
e.dataTransfer.setData("text/plain", props.variant.id);
|
e.dataTransfer.setData("text/plain", variant.id);
|
||||||
e.currentTarget.style.opacity = "0.4";
|
e.currentTarget.style.opacity = "0.4";
|
||||||
}}
|
}}
|
||||||
onDragEnd={(e) => {
|
onDragEnd={(e) => {
|
||||||
@@ -85,7 +102,8 @@ export default function VariantHeader(props: { variant: PromptVariant; canHide:
|
|||||||
setIsDragTarget(false);
|
setIsDragTarget(false);
|
||||||
}}
|
}}
|
||||||
onDrop={onReorder}
|
onDrop={onReorder}
|
||||||
backgroundColor={isDragTarget ? "gray.100" : "transparent"}
|
backgroundColor={isDragTarget ? "gray.200" : "gray.100"}
|
||||||
|
h="full"
|
||||||
>
|
>
|
||||||
<Icon
|
<Icon
|
||||||
as={RiDraggable}
|
as={RiDraggable}
|
||||||
@@ -112,8 +130,8 @@ export default function VariantHeader(props: { variant: PromptVariant; canHide:
|
|||||||
onMouseLeave={() => setIsInputHovered(false)}
|
onMouseLeave={() => setIsInputHovered(false)}
|
||||||
/>
|
/>
|
||||||
<VariantHeaderMenuButton
|
<VariantHeaderMenuButton
|
||||||
variant={props.variant}
|
variant={variant}
|
||||||
canHide={props.canHide}
|
canHide={canHide}
|
||||||
menuOpen={menuOpen}
|
menuOpen={menuOpen}
|
||||||
setMenuOpen={setMenuOpen}
|
setMenuOpen={setMenuOpen}
|
||||||
/>
|
/>
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
import { type PromptVariant } from "../OutputsTable/types";
|
import { type PromptVariant } from "../OutputsTable/types";
|
||||||
import { api } from "~/utils/api";
|
import { api } from "~/utils/api";
|
||||||
import { useHandledAsyncCallback } from "~/utils/hooks";
|
import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
|
||||||
import {
|
import {
|
||||||
Button,
|
|
||||||
Icon,
|
Icon,
|
||||||
Menu,
|
Menu,
|
||||||
MenuButton,
|
MenuButton,
|
||||||
@@ -11,15 +10,14 @@ import {
|
|||||||
MenuDivider,
|
MenuDivider,
|
||||||
Text,
|
Text,
|
||||||
Spinner,
|
Spinner,
|
||||||
|
IconButton,
|
||||||
} from "@chakra-ui/react";
|
} from "@chakra-ui/react";
|
||||||
import { BsFillTrashFill, BsGear } from "react-icons/bs";
|
import { BsFillTrashFill, BsGear, BsStars } from "react-icons/bs";
|
||||||
import { FaRegClone } from "react-icons/fa";
|
import { FaRegClone } from "react-icons/fa";
|
||||||
import { AiOutlineDiff } from "react-icons/ai";
|
|
||||||
import { useState } from "react";
|
import { useState } from "react";
|
||||||
import { RefinePromptModal } from "../RefinePromptModal/RefinePromptModal";
|
import { RefinePromptModal } from "../RefinePromptModal/RefinePromptModal";
|
||||||
import { RiExchangeFundsFill } from "react-icons/ri";
|
import { RiExchangeFundsFill } from "react-icons/ri";
|
||||||
import { SelectModelModal } from "../SelectModelModal/SelectModelModal";
|
import { ChangeModelModal } from "../ChangeModelModal/ChangeModelModal";
|
||||||
import { type SupportedModel } from "~/server/types";
|
|
||||||
|
|
||||||
export default function VariantHeaderMenuButton({
|
export default function VariantHeaderMenuButton({
|
||||||
variant,
|
variant,
|
||||||
@@ -35,11 +33,13 @@ export default function VariantHeaderMenuButton({
|
|||||||
const utils = api.useContext();
|
const utils = api.useContext();
|
||||||
|
|
||||||
const duplicateMutation = api.promptVariants.create.useMutation();
|
const duplicateMutation = api.promptVariants.create.useMutation();
|
||||||
|
const visibleScenarios = useVisibleScenarioIds();
|
||||||
|
|
||||||
const [duplicateVariant, duplicationInProgress] = useHandledAsyncCallback(async () => {
|
const [duplicateVariant, duplicationInProgress] = useHandledAsyncCallback(async () => {
|
||||||
await duplicateMutation.mutateAsync({
|
await duplicateMutation.mutateAsync({
|
||||||
experimentId: variant.experimentId,
|
experimentId: variant.experimentId,
|
||||||
variantId: variant.id,
|
variantId: variant.id,
|
||||||
|
streamScenarios: visibleScenarios,
|
||||||
});
|
});
|
||||||
await utils.promptVariants.list.invalidate();
|
await utils.promptVariants.list.invalidate();
|
||||||
}, [duplicateMutation, variant.experimentId, variant.id]);
|
}, [duplicateMutation, variant.experimentId, variant.id]);
|
||||||
@@ -52,21 +52,18 @@ export default function VariantHeaderMenuButton({
|
|||||||
await utils.promptVariants.list.invalidate();
|
await utils.promptVariants.list.invalidate();
|
||||||
}, [hideMutation, variant.id]);
|
}, [hideMutation, variant.id]);
|
||||||
|
|
||||||
const [selectModelModalOpen, setSelectModelModalOpen] = useState(false);
|
const [changeModelModalOpen, setChangeModelModalOpen] = useState(false);
|
||||||
const [refinePromptModalOpen, setRefinePromptModalOpen] = useState(false);
|
const [refinePromptModalOpen, setRefinePromptModalOpen] = useState(false);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<Menu isOpen={menuOpen} onOpen={() => setMenuOpen(true)} onClose={() => setMenuOpen(false)}>
|
<Menu isOpen={menuOpen} onOpen={() => setMenuOpen(true)} onClose={() => setMenuOpen(false)}>
|
||||||
{duplicationInProgress ? (
|
<MenuButton
|
||||||
<Spinner boxSize={4} mx={3} my={3} />
|
as={IconButton}
|
||||||
) : (
|
variant="ghost"
|
||||||
<MenuButton>
|
aria-label="Edit Scenarios"
|
||||||
<Button variant="ghost">
|
icon={<Icon as={duplicationInProgress ? Spinner : BsGear} />}
|
||||||
<Icon as={BsGear} />
|
/>
|
||||||
</Button>
|
|
||||||
</MenuButton>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<MenuList mt={-3} fontSize="md">
|
<MenuList mt={-3} fontSize="md">
|
||||||
<MenuItem icon={<Icon as={FaRegClone} boxSize={4} w={5} />} onClick={duplicateVariant}>
|
<MenuItem icon={<Icon as={FaRegClone} boxSize={4} w={5} />} onClick={duplicateVariant}>
|
||||||
@@ -74,12 +71,12 @@ export default function VariantHeaderMenuButton({
|
|||||||
</MenuItem>
|
</MenuItem>
|
||||||
<MenuItem
|
<MenuItem
|
||||||
icon={<Icon as={RiExchangeFundsFill} boxSize={5} />}
|
icon={<Icon as={RiExchangeFundsFill} boxSize={5} />}
|
||||||
onClick={() => setSelectModelModalOpen(true)}
|
onClick={() => setChangeModelModalOpen(true)}
|
||||||
>
|
>
|
||||||
Change Model
|
Change Model
|
||||||
</MenuItem>
|
</MenuItem>
|
||||||
<MenuItem
|
<MenuItem
|
||||||
icon={<Icon as={AiOutlineDiff} boxSize={5} />}
|
icon={<Icon as={BsStars} boxSize={5} />}
|
||||||
onClick={() => setRefinePromptModalOpen(true)}
|
onClick={() => setRefinePromptModalOpen(true)}
|
||||||
>
|
>
|
||||||
Refine
|
Refine
|
||||||
@@ -99,12 +96,8 @@ export default function VariantHeaderMenuButton({
|
|||||||
)}
|
)}
|
||||||
</MenuList>
|
</MenuList>
|
||||||
</Menu>
|
</Menu>
|
||||||
{selectModelModalOpen && (
|
{changeModelModalOpen && (
|
||||||
<SelectModelModal
|
<ChangeModelModal variant={variant} onClose={() => setChangeModelModalOpen(false)} />
|
||||||
originalModel={variant.model as SupportedModel}
|
|
||||||
variantId={variant.id}
|
|
||||||
onClose={() => setSelectModelModalOpen(false)}
|
|
||||||
/>
|
|
||||||
)}
|
)}
|
||||||
{refinePromptModalOpen && (
|
{refinePromptModalOpen && (
|
||||||
<RefinePromptModal variant={variant} onClose={() => setRefinePromptModalOpen(false)} />
|
<RefinePromptModal variant={variant} onClose={() => setRefinePromptModalOpen(false)} />
|
||||||
|
|||||||
@@ -1,4 +1,13 @@
|
|||||||
import { HStack, Icon, VStack, Text, Divider, Spinner, AspectRatio } from "@chakra-ui/react";
|
import {
|
||||||
|
HStack,
|
||||||
|
Icon,
|
||||||
|
VStack,
|
||||||
|
Text,
|
||||||
|
Divider,
|
||||||
|
Spinner,
|
||||||
|
AspectRatio,
|
||||||
|
SkeletonText,
|
||||||
|
} from "@chakra-ui/react";
|
||||||
import { RiFlaskLine } from "react-icons/ri";
|
import { RiFlaskLine } from "react-icons/ri";
|
||||||
import { formatTimePast } from "~/utils/dayjs";
|
import { formatTimePast } from "~/utils/dayjs";
|
||||||
import Link from "next/link";
|
import Link from "next/link";
|
||||||
@@ -93,3 +102,13 @@ export const NewExperimentCard = () => {
|
|||||||
</AspectRatio>
|
</AspectRatio>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const ExperimentCardSkeleton = () => (
|
||||||
|
<AspectRatio ratio={1.2} w="full">
|
||||||
|
<VStack align="center" borderColor="gray.200" borderWidth={1} p={4} bg="gray.50">
|
||||||
|
<SkeletonText noOfLines={1} w="80%" />
|
||||||
|
<SkeletonText noOfLines={2} w="60%" />
|
||||||
|
<SkeletonText noOfLines={1} w="80%" />
|
||||||
|
</VStack>
|
||||||
|
</AspectRatio>
|
||||||
|
);
|
||||||
|
|||||||
57
src/components/experiments/HeaderButtons/DeleteDialog.tsx
Normal file
57
src/components/experiments/HeaderButtons/DeleteDialog.tsx
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
import {
|
||||||
|
Button,
|
||||||
|
AlertDialog,
|
||||||
|
AlertDialogBody,
|
||||||
|
AlertDialogFooter,
|
||||||
|
AlertDialogHeader,
|
||||||
|
AlertDialogContent,
|
||||||
|
AlertDialogOverlay,
|
||||||
|
} from "@chakra-ui/react";
|
||||||
|
|
||||||
|
import { useRouter } from "next/router";
|
||||||
|
import { useRef } from "react";
|
||||||
|
import { api } from "~/utils/api";
|
||||||
|
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
|
||||||
|
|
||||||
|
export const DeleteDialog = ({ onClose }: { onClose: () => void }) => {
|
||||||
|
const experiment = useExperiment();
|
||||||
|
const deleteMutation = api.experiments.delete.useMutation();
|
||||||
|
const utils = api.useContext();
|
||||||
|
const router = useRouter();
|
||||||
|
|
||||||
|
const cancelRef = useRef<HTMLButtonElement>(null);
|
||||||
|
|
||||||
|
const [onDeleteConfirm] = useHandledAsyncCallback(async () => {
|
||||||
|
if (!experiment.data?.id) return;
|
||||||
|
await deleteMutation.mutateAsync({ id: experiment.data.id });
|
||||||
|
await utils.experiments.list.invalidate();
|
||||||
|
await router.push({ pathname: "/experiments" });
|
||||||
|
onClose();
|
||||||
|
}, [deleteMutation, experiment.data?.id, router]);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<AlertDialog isOpen leastDestructiveRef={cancelRef} onClose={onClose}>
|
||||||
|
<AlertDialogOverlay>
|
||||||
|
<AlertDialogContent>
|
||||||
|
<AlertDialogHeader fontSize="lg" fontWeight="bold">
|
||||||
|
Delete Experiment
|
||||||
|
</AlertDialogHeader>
|
||||||
|
|
||||||
|
<AlertDialogBody>
|
||||||
|
If you delete this experiment all the associated prompts and scenarios will be deleted
|
||||||
|
as well. Are you sure?
|
||||||
|
</AlertDialogBody>
|
||||||
|
|
||||||
|
<AlertDialogFooter>
|
||||||
|
<Button ref={cancelRef} onClick={onClose}>
|
||||||
|
Cancel
|
||||||
|
</Button>
|
||||||
|
<Button colorScheme="red" onClick={onDeleteConfirm} ml={3}>
|
||||||
|
Delete
|
||||||
|
</Button>
|
||||||
|
</AlertDialogFooter>
|
||||||
|
</AlertDialogContent>
|
||||||
|
</AlertDialogOverlay>
|
||||||
|
</AlertDialog>
|
||||||
|
);
|
||||||
|
};
|
||||||
42
src/components/experiments/HeaderButtons/HeaderButtons.tsx
Normal file
42
src/components/experiments/HeaderButtons/HeaderButtons.tsx
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
import { Button, HStack, Icon, Spinner, Text } from "@chakra-ui/react";
|
||||||
|
import { useOnForkButtonPressed } from "./useOnForkButtonPressed";
|
||||||
|
import { useExperiment } from "~/utils/hooks";
|
||||||
|
import { BsGearFill } from "react-icons/bs";
|
||||||
|
import { TbGitFork } from "react-icons/tb";
|
||||||
|
import { useAppStore } from "~/state/store";
|
||||||
|
|
||||||
|
export const HeaderButtons = () => {
|
||||||
|
const experiment = useExperiment();
|
||||||
|
|
||||||
|
const canModify = experiment.data?.access.canModify ?? false;
|
||||||
|
|
||||||
|
const { onForkButtonPressed, isForking } = useOnForkButtonPressed();
|
||||||
|
|
||||||
|
const openDrawer = useAppStore((s) => s.openDrawer);
|
||||||
|
|
||||||
|
if (experiment.isLoading) return null;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<HStack spacing={0} mt={{ base: 2, md: 0 }}>
|
||||||
|
<Button
|
||||||
|
onClick={onForkButtonPressed}
|
||||||
|
mr={4}
|
||||||
|
colorScheme={canModify ? undefined : "orange"}
|
||||||
|
bgColor={canModify ? undefined : "orange.400"}
|
||||||
|
minW={0}
|
||||||
|
variant={{ base: "solid", md: canModify ? "ghost" : "solid" }}
|
||||||
|
>
|
||||||
|
{isForking ? <Spinner boxSize={5} /> : <Icon as={TbGitFork} boxSize={5} />}
|
||||||
|
<Text ml={2}>Fork</Text>
|
||||||
|
</Button>
|
||||||
|
{canModify && (
|
||||||
|
<Button variant={{ base: "solid", md: "ghost" }} onClick={openDrawer}>
|
||||||
|
<HStack>
|
||||||
|
<Icon as={BsGearFill} />
|
||||||
|
<Text>Settings</Text>
|
||||||
|
</HStack>
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</HStack>
|
||||||
|
);
|
||||||
|
};
|
||||||
@@ -0,0 +1,30 @@
|
|||||||
|
import { useCallback } from "react";
|
||||||
|
import { api } from "~/utils/api";
|
||||||
|
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
|
||||||
|
import { signIn, useSession } from "next-auth/react";
|
||||||
|
import { useRouter } from "next/router";
|
||||||
|
|
||||||
|
export const useOnForkButtonPressed = () => {
|
||||||
|
const router = useRouter();
|
||||||
|
|
||||||
|
const user = useSession().data;
|
||||||
|
const experiment = useExperiment();
|
||||||
|
|
||||||
|
const forkMutation = api.experiments.fork.useMutation();
|
||||||
|
|
||||||
|
const [onFork, isForking] = useHandledAsyncCallback(async () => {
|
||||||
|
if (!experiment.data?.id) return;
|
||||||
|
const forkedExperimentId = await forkMutation.mutateAsync({ id: experiment.data.id });
|
||||||
|
await router.push({ pathname: "/experiments/[id]", query: { id: forkedExperimentId } });
|
||||||
|
}, [forkMutation, experiment.data?.id, router]);
|
||||||
|
|
||||||
|
const onForkButtonPressed = useCallback(() => {
|
||||||
|
if (user === null) {
|
||||||
|
signIn("github").catch(console.error);
|
||||||
|
} else {
|
||||||
|
onFork();
|
||||||
|
}
|
||||||
|
}, [onFork, user]);
|
||||||
|
|
||||||
|
return { onForkButtonPressed, isForking };
|
||||||
|
};
|
||||||
@@ -84,7 +84,11 @@ const NavSidebar = () => {
|
|||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
</VStack>
|
</VStack>
|
||||||
{user ? <UserMenu user={user} /> : <Divider />}
|
{user ? (
|
||||||
|
<UserMenu user={user} borderColor={"gray.200"} borderTopWidth={1} borderBottomWidth={1} />
|
||||||
|
) : (
|
||||||
|
<Divider />
|
||||||
|
)}
|
||||||
<VStack spacing={0} align="center">
|
<VStack spacing={0} align="center">
|
||||||
<Link
|
<Link
|
||||||
href="https://github.com/openpipe/openpipe"
|
href="https://github.com/openpipe/openpipe"
|
||||||
|
|||||||
@@ -8,12 +8,16 @@ import {
|
|||||||
PopoverTrigger,
|
PopoverTrigger,
|
||||||
PopoverContent,
|
PopoverContent,
|
||||||
Link,
|
Link,
|
||||||
|
useColorMode,
|
||||||
|
type StackProps,
|
||||||
} from "@chakra-ui/react";
|
} from "@chakra-ui/react";
|
||||||
import { type Session } from "next-auth";
|
import { type Session } from "next-auth";
|
||||||
import { signOut } from "next-auth/react";
|
import { signOut } from "next-auth/react";
|
||||||
import { BsBoxArrowRight, BsChevronRight, BsPersonCircle } from "react-icons/bs";
|
import { BsBoxArrowRight, BsChevronRight, BsPersonCircle } from "react-icons/bs";
|
||||||
|
|
||||||
export default function UserMenu({ user }: { user: Session }) {
|
export default function UserMenu({ user, ...rest }: { user: Session } & StackProps) {
|
||||||
|
const { colorMode } = useColorMode();
|
||||||
|
|
||||||
const profileImage = user.user.image ? (
|
const profileImage = user.user.image ? (
|
||||||
<Image src={user.user.image} alt="profile picture" boxSize={8} borderRadius="50%" />
|
<Image src={user.user.image} alt="profile picture" boxSize={8} borderRadius="50%" />
|
||||||
) : (
|
) : (
|
||||||
@@ -29,12 +33,10 @@ export default function UserMenu({ user }: { user: Session }) {
|
|||||||
px={3}
|
px={3}
|
||||||
spacing={3}
|
spacing={3}
|
||||||
py={2}
|
py={2}
|
||||||
borderColor={"gray.200"}
|
{...rest}
|
||||||
borderTopWidth={1}
|
|
||||||
borderBottomWidth={1}
|
|
||||||
cursor="pointer"
|
cursor="pointer"
|
||||||
_hover={{
|
_hover={{
|
||||||
bgColor: "gray.200",
|
bgColor: colorMode === "light" ? "gray.200" : "gray.700",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
{profileImage}
|
{profileImage}
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ export const env = createEnv({
|
|||||||
server: {
|
server: {
|
||||||
DATABASE_URL: z.string().url(),
|
DATABASE_URL: z.string().url(),
|
||||||
NODE_ENV: z.enum(["development", "test", "production"]).default("development"),
|
NODE_ENV: z.enum(["development", "test", "production"]).default("development"),
|
||||||
OPENAI_API_KEY: z.string().min(1),
|
|
||||||
RESTRICT_PRISMA_LOGS: z
|
RESTRICT_PRISMA_LOGS: z
|
||||||
.string()
|
.string()
|
||||||
.optional()
|
.optional()
|
||||||
@@ -17,6 +16,9 @@ export const env = createEnv({
|
|||||||
.transform((val) => val.toLowerCase() === "true"),
|
.transform((val) => val.toLowerCase() === "true"),
|
||||||
GITHUB_CLIENT_ID: z.string().min(1),
|
GITHUB_CLIENT_ID: z.string().min(1),
|
||||||
GITHUB_CLIENT_SECRET: z.string().min(1),
|
GITHUB_CLIENT_SECRET: z.string().min(1),
|
||||||
|
OPENAI_API_KEY: z.string().min(1),
|
||||||
|
REPLICATE_API_TOKEN: z.string().default("placeholder"),
|
||||||
|
ANTHROPIC_API_KEY: z.string().default("placeholder"),
|
||||||
},
|
},
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -42,6 +44,8 @@ export const env = createEnv({
|
|||||||
NEXT_PUBLIC_SOCKET_URL: process.env.NEXT_PUBLIC_SOCKET_URL,
|
NEXT_PUBLIC_SOCKET_URL: process.env.NEXT_PUBLIC_SOCKET_URL,
|
||||||
GITHUB_CLIENT_ID: process.env.GITHUB_CLIENT_ID,
|
GITHUB_CLIENT_ID: process.env.GITHUB_CLIENT_ID,
|
||||||
GITHUB_CLIENT_SECRET: process.env.GITHUB_CLIENT_SECRET,
|
GITHUB_CLIENT_SECRET: process.env.GITHUB_CLIENT_SECRET,
|
||||||
|
REPLICATE_API_TOKEN: process.env.REPLICATE_API_TOKEN,
|
||||||
|
ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY,
|
||||||
},
|
},
|
||||||
/**
|
/**
|
||||||
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation.
|
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation.
|
||||||
|
|||||||
69
src/modelProviders/anthropic/codegen/codegen.ts
Normal file
69
src/modelProviders/anthropic/codegen/codegen.ts
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||||
|
|
||||||
|
import YAML from "yaml";
|
||||||
|
import fs from "fs";
|
||||||
|
import path from "path";
|
||||||
|
import { openapiSchemaToJsonSchema } from "@openapi-contrib/openapi-schema-to-json-schema";
|
||||||
|
import $RefParser from "@apidevtools/json-schema-ref-parser";
|
||||||
|
import { type JSONObject } from "superjson/dist/types";
|
||||||
|
import assert from "assert";
|
||||||
|
import { type JSONSchema4Object } from "json-schema";
|
||||||
|
import { isObject } from "lodash-es";
|
||||||
|
|
||||||
|
// @ts-expect-error for some reason missing from types
|
||||||
|
import parserEstree from "prettier/plugins/estree";
|
||||||
|
import parserBabel from "prettier/plugins/babel";
|
||||||
|
import prettier from "prettier/standalone";
|
||||||
|
|
||||||
|
const OPENAPI_URL =
|
||||||
|
"https://raw.githubusercontent.com/tryAGI/Anthropic/1c0871e861de60a4c3a843cb90e17d63e86c234a/docs/openapi.yaml";
|
||||||
|
|
||||||
|
// Fetch the openapi document
|
||||||
|
const response = await fetch(OPENAPI_URL);
|
||||||
|
const openApiYaml = await response.text();
|
||||||
|
|
||||||
|
// Parse the yaml document
|
||||||
|
let schema = YAML.parse(openApiYaml) as JSONObject;
|
||||||
|
schema = openapiSchemaToJsonSchema(schema);
|
||||||
|
|
||||||
|
const jsonSchema = await $RefParser.dereference(schema);
|
||||||
|
|
||||||
|
assert("components" in jsonSchema);
|
||||||
|
const completionRequestSchema = jsonSchema.components.schemas
|
||||||
|
.CreateCompletionRequest as JSONSchema4Object;
|
||||||
|
|
||||||
|
// We need to do a bit of surgery here since the Monaco editor doesn't like
|
||||||
|
// the fact that the schema says `model` can be either a string or an enum,
|
||||||
|
// and displays a warning in the editor. Let's stick with just an enum for
|
||||||
|
// now and drop the string option.
|
||||||
|
assert(
|
||||||
|
"properties" in completionRequestSchema &&
|
||||||
|
isObject(completionRequestSchema.properties) &&
|
||||||
|
"model" in completionRequestSchema.properties &&
|
||||||
|
isObject(completionRequestSchema.properties.model),
|
||||||
|
);
|
||||||
|
|
||||||
|
const modelProperty = completionRequestSchema.properties.model;
|
||||||
|
assert(
|
||||||
|
"oneOf" in modelProperty &&
|
||||||
|
Array.isArray(modelProperty.oneOf) &&
|
||||||
|
modelProperty.oneOf.length === 2 &&
|
||||||
|
isObject(modelProperty.oneOf[1]) &&
|
||||||
|
"enum" in modelProperty.oneOf[1],
|
||||||
|
"Expected model to have oneOf length of 2",
|
||||||
|
);
|
||||||
|
modelProperty.type = "string";
|
||||||
|
modelProperty.enum = modelProperty.oneOf[1].enum;
|
||||||
|
delete modelProperty["oneOf"];
|
||||||
|
|
||||||
|
// Get the directory of the current script
|
||||||
|
const currentDirectory = path.dirname(import.meta.url).replace("file://", "");
|
||||||
|
|
||||||
|
// Write the JSON schema to a file in the current directory
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(currentDirectory, "input.schema.json"),
|
||||||
|
await prettier.format(JSON.stringify(completionRequestSchema, null, 2), {
|
||||||
|
parser: "json",
|
||||||
|
plugins: [parserBabel, parserEstree],
|
||||||
|
}),
|
||||||
|
);
|
||||||
129
src/modelProviders/anthropic/codegen/input.schema.json
Normal file
129
src/modelProviders/anthropic/codegen/input.schema.json
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
{
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"model": {
|
||||||
|
"description": "The model that will complete your prompt.\nAs we improve Claude, we develop new versions of it that you can query.\nThis parameter controls which version of Claude answers your request.\nRight now we are offering two model families: Claude, and Claude Instant.\nYou can use them by setting model to \"claude-2\" or \"claude-instant-1\", respectively.\nSee models for additional details.\n",
|
||||||
|
"x-oaiTypeLabel": "string",
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"claude-2",
|
||||||
|
"claude-2.0",
|
||||||
|
"claude-instant-1",
|
||||||
|
"claude-instant-1.1"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"prompt": {
|
||||||
|
"description": "The prompt that you want Claude to complete.\n\nFor proper response generation you will need to format your prompt as follows:\n\\n\\nHuman: ${userQuestion}\\n\\nAssistant:\nSee our comments on prompts for more context.\n",
|
||||||
|
"default": "<|endoftext|>",
|
||||||
|
"nullable": true,
|
||||||
|
"oneOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"default": "",
|
||||||
|
"example": "This is a test."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "string",
|
||||||
|
"default": "",
|
||||||
|
"example": "This is a test."
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"items": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"example": "[1212, 318, 257, 1332, 13]"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"items": {
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"items": {
|
||||||
|
"type": "integer"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"example": "[[1212, 318, 257, 1332, 13]]"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"max_tokens_to_sample": {
|
||||||
|
"type": "integer",
|
||||||
|
"minimum": 1,
|
||||||
|
"default": 256,
|
||||||
|
"example": 256,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "The maximum number of tokens to generate before stopping.\n\nNote that our models may stop before reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate.\n"
|
||||||
|
},
|
||||||
|
"temperature": {
|
||||||
|
"type": "number",
|
||||||
|
"minimum": 0,
|
||||||
|
"maximum": 1,
|
||||||
|
"default": 1,
|
||||||
|
"example": 1,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "Amount of randomness injected into the response.\n\nDefaults to 1. Ranges from 0 to 1. Use temp closer to 0 for analytical / multiple choice, and closer to 1 for creative and generative tasks.\n"
|
||||||
|
},
|
||||||
|
"top_p": {
|
||||||
|
"type": "number",
|
||||||
|
"minimum": 0,
|
||||||
|
"maximum": 1,
|
||||||
|
"default": 1,
|
||||||
|
"example": 1,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "Use nucleus sampling.\n\nIn nucleus sampling, we compute the cumulative distribution over all the options \nfor each subsequent token in decreasing probability order and cut it off once \nit reaches a particular probability specified by top_p. You should either alter temperature or top_p, but not both.\n"
|
||||||
|
},
|
||||||
|
"top_k": {
|
||||||
|
"type": "number",
|
||||||
|
"minimum": 0,
|
||||||
|
"default": 5,
|
||||||
|
"example": 5,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "Only sample from the top K options for each subsequent token.\n\nUsed to remove \"long tail\" low probability responses. Learn more technical details here.\n"
|
||||||
|
},
|
||||||
|
"stream": {
|
||||||
|
"description": "Whether to incrementally stream the response using server-sent events.\nSee this guide to SSE events for details.type: boolean\n",
|
||||||
|
"nullable": true,
|
||||||
|
"default": false
|
||||||
|
},
|
||||||
|
"stop_sequences": {
|
||||||
|
"description": "Sequences that will cause the model to stop generating completion text.\nOur models stop on \"\\n\\nHuman:\", and may include additional built-in stop sequences in the future. By providing the stop_sequences parameter, you may include additional strings that will cause the model to stop generating.\n",
|
||||||
|
"default": null,
|
||||||
|
"nullable": true,
|
||||||
|
"oneOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"default": "<|endoftext|>",
|
||||||
|
"example": "\n",
|
||||||
|
"nullable": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"maxItems": 4,
|
||||||
|
"items": {
|
||||||
|
"type": "string",
|
||||||
|
"example": "[\"\\n\"]"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"metadata": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"user_id": {
|
||||||
|
"type": "string",
|
||||||
|
"example": "13803d75-b4b5-4c3e-b2a2-6f21399b021b",
|
||||||
|
"description": "An external identifier for the user who is associated with the request.\n\nThis should be a uuid, hash value, or other opaque identifier. Anthropic may use this id to help detect abuse. \nDo not include any identifying information such as name, email address, or phone number.\n"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "An object describing metadata about the request.\n"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["model", "prompt", "max_tokens_to_sample"]
|
||||||
|
}
|
||||||
40
src/modelProviders/anthropic/frontend.ts
Normal file
40
src/modelProviders/anthropic/frontend.ts
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
import { type Completion } from "@anthropic-ai/sdk/resources";
|
||||||
|
import { type SupportedModel } from ".";
|
||||||
|
import { type FrontendModelProvider } from "../types";
|
||||||
|
import { refinementActions } from "./refinementActions";
|
||||||
|
|
||||||
|
const frontendModelProvider: FrontendModelProvider<SupportedModel, Completion> = {
|
||||||
|
name: "Replicate Llama2",
|
||||||
|
|
||||||
|
models: {
|
||||||
|
"claude-2.0": {
|
||||||
|
name: "Claude 2.0",
|
||||||
|
contextWindow: 100000,
|
||||||
|
promptTokenPrice: 11.02 / 1000000,
|
||||||
|
completionTokenPrice: 32.68 / 1000000,
|
||||||
|
speed: "medium",
|
||||||
|
provider: "anthropic",
|
||||||
|
learnMoreUrl: "https://www.anthropic.com/product",
|
||||||
|
},
|
||||||
|
"claude-instant-1.1": {
|
||||||
|
name: "Claude Instant 1.1",
|
||||||
|
contextWindow: 100000,
|
||||||
|
promptTokenPrice: 1.63 / 1000000,
|
||||||
|
completionTokenPrice: 5.51 / 1000000,
|
||||||
|
speed: "fast",
|
||||||
|
provider: "anthropic",
|
||||||
|
learnMoreUrl: "https://www.anthropic.com/product",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
refinementActions,
|
||||||
|
|
||||||
|
normalizeOutput: (output) => {
|
||||||
|
return {
|
||||||
|
type: "text",
|
||||||
|
value: output.completion,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
export default frontendModelProvider;
|
||||||
86
src/modelProviders/anthropic/getCompletion.ts
Normal file
86
src/modelProviders/anthropic/getCompletion.ts
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
import { env } from "~/env.mjs";
|
||||||
|
import { type CompletionResponse } from "../types";
|
||||||
|
|
||||||
|
import Anthropic, { APIError } from "@anthropic-ai/sdk";
|
||||||
|
import { type Completion, type CompletionCreateParams } from "@anthropic-ai/sdk/resources";
|
||||||
|
import { isObject, isString } from "lodash-es";
|
||||||
|
|
||||||
|
const anthropic = new Anthropic({
|
||||||
|
apiKey: env.ANTHROPIC_API_KEY,
|
||||||
|
});
|
||||||
|
|
||||||
|
export async function getCompletion(
|
||||||
|
input: CompletionCreateParams,
|
||||||
|
onStream: ((partialOutput: Completion) => void) | null,
|
||||||
|
): Promise<CompletionResponse<Completion>> {
|
||||||
|
const start = Date.now();
|
||||||
|
let finalCompletion: Completion | null = null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (onStream) {
|
||||||
|
const resp = await anthropic.completions.create(
|
||||||
|
{ ...input, stream: true },
|
||||||
|
{
|
||||||
|
maxRetries: 0,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
for await (const part of resp) {
|
||||||
|
if (finalCompletion === null) {
|
||||||
|
finalCompletion = part;
|
||||||
|
} else {
|
||||||
|
finalCompletion = { ...part, completion: finalCompletion.completion + part.completion };
|
||||||
|
}
|
||||||
|
onStream(finalCompletion);
|
||||||
|
}
|
||||||
|
if (!finalCompletion) {
|
||||||
|
return {
|
||||||
|
type: "error",
|
||||||
|
message: "Streaming failed to return a completion",
|
||||||
|
autoRetry: false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const resp = await anthropic.completions.create(
|
||||||
|
{ ...input, stream: false },
|
||||||
|
{
|
||||||
|
maxRetries: 0,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
finalCompletion = resp;
|
||||||
|
}
|
||||||
|
const timeToComplete = Date.now() - start;
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: "success",
|
||||||
|
statusCode: 200,
|
||||||
|
value: finalCompletion,
|
||||||
|
timeToComplete,
|
||||||
|
};
|
||||||
|
} catch (error: unknown) {
|
||||||
|
console.log("CAUGHT ERROR", error);
|
||||||
|
if (error instanceof APIError) {
|
||||||
|
const message =
|
||||||
|
isObject(error.error) &&
|
||||||
|
"error" in error.error &&
|
||||||
|
isObject(error.error.error) &&
|
||||||
|
"message" in error.error.error &&
|
||||||
|
isString(error.error.error.message)
|
||||||
|
? error.error.error.message
|
||||||
|
: error.message;
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: "error",
|
||||||
|
message,
|
||||||
|
autoRetry: error.status === 429 || error.status === 503,
|
||||||
|
statusCode: error.status,
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
return {
|
||||||
|
type: "error",
|
||||||
|
message: (error as Error).message,
|
||||||
|
autoRetry: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
34
src/modelProviders/anthropic/index.ts
Normal file
34
src/modelProviders/anthropic/index.ts
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
import { type JSONSchema4 } from "json-schema";
|
||||||
|
import { type ModelProvider } from "../types";
|
||||||
|
import inputSchema from "./codegen/input.schema.json";
|
||||||
|
import { getCompletion } from "./getCompletion";
|
||||||
|
import frontendModelProvider from "./frontend";
|
||||||
|
import type { Completion, CompletionCreateParams } from "@anthropic-ai/sdk/resources";
|
||||||
|
|
||||||
|
const supportedModels = ["claude-2.0", "claude-instant-1.1"] as const;
|
||||||
|
|
||||||
|
export type SupportedModel = (typeof supportedModels)[number];
|
||||||
|
|
||||||
|
export type AnthropicProvider = ModelProvider<SupportedModel, CompletionCreateParams, Completion>;
|
||||||
|
|
||||||
|
const modelProvider: AnthropicProvider = {
|
||||||
|
getModel: (input) => {
|
||||||
|
if (supportedModels.includes(input.model as SupportedModel))
|
||||||
|
return input.model as SupportedModel;
|
||||||
|
|
||||||
|
const modelMaps: Record<string, SupportedModel> = {
|
||||||
|
"claude-2": "claude-2.0",
|
||||||
|
"claude-instant-1": "claude-instant-1.1",
|
||||||
|
};
|
||||||
|
|
||||||
|
if (input.model in modelMaps) return modelMaps[input.model] as SupportedModel;
|
||||||
|
|
||||||
|
return null;
|
||||||
|
},
|
||||||
|
inputSchema: inputSchema as JSONSchema4,
|
||||||
|
canStream: true,
|
||||||
|
getCompletion,
|
||||||
|
...frontendModelProvider,
|
||||||
|
};
|
||||||
|
|
||||||
|
export default modelProvider;
|
||||||
3
src/modelProviders/anthropic/refinementActions.ts
Normal file
3
src/modelProviders/anthropic/refinementActions.ts
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
import { type RefinementAction } from "../types";

// No refinement actions are defined for the Anthropic provider yet; the
// empty map keeps this provider's shape uniform with the other providers.
export const refinementActions: Record<string, RefinementAction> = {};
|
||||||
15
src/modelProviders/frontendModelProviders.ts
Normal file
15
src/modelProviders/frontendModelProviders.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import openaiChatCompletionFrontend from "./openai-ChatCompletion/frontend";
import replicateLlama2Frontend from "./replicate-llama2/frontend";
import anthropicFrontend from "./anthropic/frontend";
import { type SupportedProvider, type FrontendModelProvider } from "./types";

// Keep attributes here that need to be accessible from the frontend. We can't
// just include them in the default `modelProviders` object because it has some
// transient dependencies that can only be imported on the server.
// Keyed by provider id; keys must stay in sync with modelProviders.ts.
const frontendModelProviders: Record<SupportedProvider, FrontendModelProvider<any, any>> = {
  "openai/ChatCompletion": openaiChatCompletionFrontend,
  "replicate/llama2": replicateLlama2Frontend,
  anthropic: anthropicFrontend,
};

export default frontendModelProviders;
|
||||||
36
src/modelProviders/generateTypes.ts
Normal file
36
src/modelProviders/generateTypes.ts
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
import { type JSONSchema4Object } from "json-schema";
|
||||||
|
import modelProviders from "./modelProviders";
|
||||||
|
import { compile } from "json-schema-to-typescript";
|
||||||
|
import dedent from "dedent";
|
||||||
|
|
||||||
|
export default async function generateTypes() {
|
||||||
|
const combinedSchema = {
|
||||||
|
type: "object",
|
||||||
|
properties: {} as Record<string, JSONSchema4Object>,
|
||||||
|
};
|
||||||
|
|
||||||
|
Object.entries(modelProviders).forEach(([id, provider]) => {
|
||||||
|
combinedSchema.properties[id] = provider.inputSchema;
|
||||||
|
});
|
||||||
|
|
||||||
|
Object.entries(modelProviders).forEach(([id, provider]) => {
|
||||||
|
combinedSchema.properties[id] = provider.inputSchema;
|
||||||
|
});
|
||||||
|
|
||||||
|
const promptTypes = (
|
||||||
|
await compile(combinedSchema as JSONSchema4Object, "PromptTypes", {
|
||||||
|
additionalProperties: false,
|
||||||
|
bannerComment: dedent`
|
||||||
|
/**
|
||||||
|
* This type map defines the input types for each model provider.
|
||||||
|
*/
|
||||||
|
`,
|
||||||
|
})
|
||||||
|
).replace(/export interface PromptTypes/g, "interface PromptTypes");
|
||||||
|
|
||||||
|
return dedent`
|
||||||
|
${promptTypes}
|
||||||
|
|
||||||
|
declare function definePrompt<T extends keyof PromptTypes>(modelProvider: T, input: PromptTypes[T])
|
||||||
|
`;
|
||||||
|
}
|
||||||
12
src/modelProviders/modelProviders.ts
Normal file
12
src/modelProviders/modelProviders.ts
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
import openaiChatCompletion from "./openai-ChatCompletion";
import replicateLlama2 from "./replicate-llama2";
import anthropic from "./anthropic";
import { type SupportedProvider, type ModelProvider } from "./types";

// Server-side registry of every supported model provider, keyed by provider
// id. Frontend-safe metadata lives separately in frontendModelProviders.ts
// because these modules pull in server-only dependencies.
const modelProviders: Record<SupportedProvider, ModelProvider<any, any, any>> = {
  "openai/ChatCompletion": openaiChatCompletion,
  "replicate/llama2": replicateLlama2,
  anthropic,
};

export default modelProviders;
|
||||||
77
src/modelProviders/openai-ChatCompletion/codegen/codegen.ts
Normal file
77
src/modelProviders/openai-ChatCompletion/codegen/codegen.ts
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||||
|
|
||||||
|
import YAML from "yaml";
|
||||||
|
import fs from "fs";
|
||||||
|
import path from "path";
|
||||||
|
import { openapiSchemaToJsonSchema } from "@openapi-contrib/openapi-schema-to-json-schema";
|
||||||
|
import $RefParser from "@apidevtools/json-schema-ref-parser";
|
||||||
|
import { type JSONObject } from "superjson/dist/types";
|
||||||
|
import assert from "assert";
|
||||||
|
import { type JSONSchema4Object } from "json-schema";
|
||||||
|
import { isObject } from "lodash-es";
|
||||||
|
|
||||||
|
// @ts-expect-error for some reason missing from types
|
||||||
|
import parserEstree from "prettier/plugins/estree";
|
||||||
|
import parserBabel from "prettier/plugins/babel";
|
||||||
|
import prettier from "prettier/standalone";
|
||||||
|
|
||||||
|
const OPENAPI_URL =
|
||||||
|
"https://raw.githubusercontent.com/openai/openai-openapi/0c432eb66fd0c758fd8b9bd69db41c1096e5f4db/openapi.yaml";
|
||||||
|
|
||||||
|
// Fetch the openapi document
|
||||||
|
const response = await fetch(OPENAPI_URL);
|
||||||
|
const openApiYaml = await response.text();
|
||||||
|
|
||||||
|
// Parse the yaml document
|
||||||
|
let schema = YAML.parse(openApiYaml) as JSONObject;
|
||||||
|
schema = openapiSchemaToJsonSchema(schema);
|
||||||
|
|
||||||
|
const jsonSchema = await $RefParser.dereference(schema);
|
||||||
|
|
||||||
|
assert("components" in jsonSchema);
|
||||||
|
const completionRequestSchema = jsonSchema.components.schemas
|
||||||
|
.CreateChatCompletionRequest as JSONSchema4Object;
|
||||||
|
|
||||||
|
// We need to do a bit of surgery here since the Monaco editor doesn't like
|
||||||
|
// the fact that the schema says `model` can be either a string or an enum,
|
||||||
|
// and displays a warning in the editor. Let's stick with just an enum for
|
||||||
|
// now and drop the string option.
|
||||||
|
assert(
|
||||||
|
"properties" in completionRequestSchema &&
|
||||||
|
isObject(completionRequestSchema.properties) &&
|
||||||
|
"model" in completionRequestSchema.properties &&
|
||||||
|
isObject(completionRequestSchema.properties.model),
|
||||||
|
);
|
||||||
|
|
||||||
|
const modelProperty = completionRequestSchema.properties.model;
|
||||||
|
assert(
|
||||||
|
"oneOf" in modelProperty &&
|
||||||
|
Array.isArray(modelProperty.oneOf) &&
|
||||||
|
modelProperty.oneOf.length === 2 &&
|
||||||
|
isObject(modelProperty.oneOf[1]) &&
|
||||||
|
"enum" in modelProperty.oneOf[1],
|
||||||
|
"Expected model to have oneOf length of 2",
|
||||||
|
);
|
||||||
|
modelProperty.type = "string";
|
||||||
|
modelProperty.enum = modelProperty.oneOf[1].enum;
|
||||||
|
delete modelProperty["oneOf"];
|
||||||
|
|
||||||
|
// The default of "inf" confuses the Typescript generator, so can just remove it
|
||||||
|
assert(
|
||||||
|
"max_tokens" in completionRequestSchema.properties &&
|
||||||
|
isObject(completionRequestSchema.properties.max_tokens) &&
|
||||||
|
"default" in completionRequestSchema.properties.max_tokens,
|
||||||
|
);
|
||||||
|
delete completionRequestSchema.properties.max_tokens["default"];
|
||||||
|
|
||||||
|
// Get the directory of the current script
|
||||||
|
const currentDirectory = path.dirname(import.meta.url).replace("file://", "");
|
||||||
|
|
||||||
|
// Write the JSON schema to a file in the current directory
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(currentDirectory, "input.schema.json"),
|
||||||
|
await prettier.format(JSON.stringify(completionRequestSchema, null, 2), {
|
||||||
|
parser: "json",
|
||||||
|
plugins: [parserBabel, parserEstree],
|
||||||
|
}),
|
||||||
|
);
|
||||||
@@ -0,0 +1,185 @@
|
|||||||
|
{
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"model": {
|
||||||
|
"description": "ID of the model to use. See the [model endpoint compatibility](/docs/models/model-endpoint-compatibility) table for details on which models work with the Chat API.",
|
||||||
|
"example": "gpt-3.5-turbo",
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"gpt-4",
|
||||||
|
"gpt-4-0613",
|
||||||
|
"gpt-4-32k",
|
||||||
|
"gpt-4-32k-0613",
|
||||||
|
"gpt-3.5-turbo",
|
||||||
|
"gpt-3.5-turbo-16k",
|
||||||
|
"gpt-3.5-turbo-0613",
|
||||||
|
"gpt-3.5-turbo-16k-0613"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"messages": {
|
||||||
|
"description": "A list of messages comprising the conversation so far. [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb).",
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"role": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": ["system", "user", "assistant", "function"],
|
||||||
|
"description": "The role of the messages author. One of `system`, `user`, `assistant`, or `function`."
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "The contents of the message. `content` is required for all messages except assistant messages with function calls."
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "The name of the author of this message. `name` is required if role is `function`, and it should be the name of the function whose response is in the `content`. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters."
|
||||||
|
},
|
||||||
|
"function_call": {
|
||||||
|
"type": "object",
|
||||||
|
"description": "The name and arguments of a function that should be called, as generated by the model.",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "The name of the function to call."
|
||||||
|
},
|
||||||
|
"arguments": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function."
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["role"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"functions": {
|
||||||
|
"description": "A list of functions the model may generate JSON inputs for.",
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64."
|
||||||
|
},
|
||||||
|
"description": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "The description of what the function does."
|
||||||
|
},
|
||||||
|
"parameters": {
|
||||||
|
"type": "object",
|
||||||
|
"description": "The parameters the functions accepts, described as a JSON Schema object. See the [guide](/docs/guides/gpt/function-calling) for examples, and the [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for documentation about the format.",
|
||||||
|
"additionalProperties": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["name"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"function_call": {
|
||||||
|
"description": "Controls how the model responds to function calls. \"none\" means the model does not call a function, and responds to the end-user. \"auto\" means the model can pick between an end-user or calling a function. Specifying a particular function via `{\"name\":\\ \"my_function\"}` forces the model to call that function. \"none\" is the default when no functions are present. \"auto\" is the default if functions are present.",
|
||||||
|
"oneOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": ["none", "auto"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "The name of the function to call."
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["name"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"temperature": {
|
||||||
|
"type": "number",
|
||||||
|
"minimum": 0,
|
||||||
|
"maximum": 2,
|
||||||
|
"default": 1,
|
||||||
|
"example": 1,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.\n\nWe generally recommend altering this or `top_p` but not both.\n"
|
||||||
|
},
|
||||||
|
"top_p": {
|
||||||
|
"type": "number",
|
||||||
|
"minimum": 0,
|
||||||
|
"maximum": 1,
|
||||||
|
"default": 1,
|
||||||
|
"example": 1,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.\n\nWe generally recommend altering this or `temperature` but not both.\n"
|
||||||
|
},
|
||||||
|
"n": {
|
||||||
|
"type": "integer",
|
||||||
|
"minimum": 1,
|
||||||
|
"maximum": 128,
|
||||||
|
"default": 1,
|
||||||
|
"example": 1,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "How many chat completion choices to generate for each input message."
|
||||||
|
},
|
||||||
|
"stream": {
|
||||||
|
"description": "If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) as they become available, with the stream terminated by a `data: [DONE]` message. [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_stream_completions.ipynb).\n",
|
||||||
|
"type": "boolean",
|
||||||
|
"nullable": true,
|
||||||
|
"default": false
|
||||||
|
},
|
||||||
|
"stop": {
|
||||||
|
"description": "Up to 4 sequences where the API will stop generating further tokens.\n",
|
||||||
|
"default": null,
|
||||||
|
"oneOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"nullable": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"maxItems": 4,
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"max_tokens": {
|
||||||
|
"description": "The maximum number of [tokens](/tokenizer) to generate in the chat completion.\n\nThe total length of input tokens and generated tokens is limited by the model's context length. [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.\n",
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"presence_penalty": {
|
||||||
|
"type": "number",
|
||||||
|
"default": 0,
|
||||||
|
"minimum": -2,
|
||||||
|
"maximum": 2,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.\n\n[See more information about frequency and presence penalties.](/docs/api-reference/parameter-details)\n"
|
||||||
|
},
|
||||||
|
"frequency_penalty": {
|
||||||
|
"type": "number",
|
||||||
|
"default": 0,
|
||||||
|
"minimum": -2,
|
||||||
|
"maximum": 2,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.\n\n[See more information about frequency and presence penalties.](/docs/api-reference/parameter-details)\n"
|
||||||
|
},
|
||||||
|
"logit_bias": {
|
||||||
|
"type": "object",
|
||||||
|
"x-oaiTypeLabel": "map",
|
||||||
|
"default": null,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "Modify the likelihood of specified tokens appearing in the completion.\n\nAccepts a json object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token.\n"
|
||||||
|
},
|
||||||
|
"user": {
|
||||||
|
"type": "string",
|
||||||
|
"example": "user-1234",
|
||||||
|
"description": "A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. [Learn more](/docs/guides/safety-best-practices/end-user-ids).\n"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["model", "messages"]
|
||||||
|
}
|
||||||
87
src/modelProviders/openai-ChatCompletion/frontend.ts
Normal file
87
src/modelProviders/openai-ChatCompletion/frontend.ts
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
import { type JsonValue } from "type-fest";
|
||||||
|
import { type SupportedModel } from ".";
|
||||||
|
import { type FrontendModelProvider } from "../types";
|
||||||
|
import { type ChatCompletion } from "openai/resources/chat";
|
||||||
|
import { refinementActions } from "./refinementActions";
|
||||||
|
|
||||||
|
const frontendModelProvider: FrontendModelProvider<SupportedModel, ChatCompletion> = {
|
||||||
|
name: "OpenAI ChatCompletion",
|
||||||
|
|
||||||
|
models: {
|
||||||
|
"gpt-4-0613": {
|
||||||
|
name: "GPT-4",
|
||||||
|
contextWindow: 8192,
|
||||||
|
promptTokenPrice: 0.00003,
|
||||||
|
completionTokenPrice: 0.00006,
|
||||||
|
speed: "medium",
|
||||||
|
provider: "openai/ChatCompletion",
|
||||||
|
learnMoreUrl: "https://openai.com/gpt-4",
|
||||||
|
},
|
||||||
|
"gpt-4-32k-0613": {
|
||||||
|
name: "GPT-4 32k",
|
||||||
|
contextWindow: 32768,
|
||||||
|
promptTokenPrice: 0.00006,
|
||||||
|
completionTokenPrice: 0.00012,
|
||||||
|
speed: "medium",
|
||||||
|
provider: "openai/ChatCompletion",
|
||||||
|
learnMoreUrl: "https://openai.com/gpt-4",
|
||||||
|
},
|
||||||
|
"gpt-3.5-turbo-0613": {
|
||||||
|
name: "GPT-3.5 Turbo",
|
||||||
|
contextWindow: 4096,
|
||||||
|
promptTokenPrice: 0.0000015,
|
||||||
|
completionTokenPrice: 0.000002,
|
||||||
|
speed: "fast",
|
||||||
|
provider: "openai/ChatCompletion",
|
||||||
|
learnMoreUrl: "https://platform.openai.com/docs/guides/gpt/chat-completions-api",
|
||||||
|
},
|
||||||
|
"gpt-3.5-turbo-16k-0613": {
|
||||||
|
name: "GPT-3.5 Turbo 16k",
|
||||||
|
contextWindow: 16384,
|
||||||
|
promptTokenPrice: 0.000003,
|
||||||
|
completionTokenPrice: 0.000004,
|
||||||
|
speed: "fast",
|
||||||
|
provider: "openai/ChatCompletion",
|
||||||
|
learnMoreUrl: "https://platform.openai.com/docs/guides/gpt/chat-completions-api",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
refinementActions,
|
||||||
|
|
||||||
|
normalizeOutput: (output) => {
|
||||||
|
const message = output.choices[0]?.message;
|
||||||
|
if (!message)
|
||||||
|
return {
|
||||||
|
type: "json",
|
||||||
|
value: output as unknown as JsonValue,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (message.content) {
|
||||||
|
return {
|
||||||
|
type: "text",
|
||||||
|
value: message.content,
|
||||||
|
};
|
||||||
|
} else if (message.function_call) {
|
||||||
|
let args = message.function_call.arguments ?? "";
|
||||||
|
try {
|
||||||
|
args = JSON.parse(args);
|
||||||
|
} catch (e) {
|
||||||
|
// Ignore
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
type: "json",
|
||||||
|
value: {
|
||||||
|
...message.function_call,
|
||||||
|
arguments: args,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
return {
|
||||||
|
type: "json",
|
||||||
|
value: message as unknown as JsonValue,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
export default frontendModelProvider;
|
||||||
139
src/modelProviders/openai-ChatCompletion/getCompletion.ts
Normal file
139
src/modelProviders/openai-ChatCompletion/getCompletion.ts
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
/* eslint-disable @typescript-eslint/no-unsafe-call */
|
||||||
|
import {
|
||||||
|
type ChatCompletionChunk,
|
||||||
|
type ChatCompletion,
|
||||||
|
type CompletionCreateParams,
|
||||||
|
} from "openai/resources/chat";
|
||||||
|
import { countOpenAIChatTokens } from "~/utils/countTokens";
|
||||||
|
import { type CompletionResponse } from "../types";
|
||||||
|
import { omit } from "lodash-es";
|
||||||
|
import { openai } from "~/server/utils/openai";
|
||||||
|
import { truthyFilter } from "~/utils/utils";
|
||||||
|
import { APIError } from "openai";
|
||||||
|
import frontendModelProvider from "./frontend";
|
||||||
|
import modelProvider, { type SupportedModel } from ".";
|
||||||
|
|
||||||
|
const mergeStreamedChunks = (
|
||||||
|
base: ChatCompletion | null,
|
||||||
|
chunk: ChatCompletionChunk,
|
||||||
|
): ChatCompletion => {
|
||||||
|
if (base === null) {
|
||||||
|
return mergeStreamedChunks({ ...chunk, choices: [] }, chunk);
|
||||||
|
}
|
||||||
|
|
||||||
|
const choices = [...base.choices];
|
||||||
|
for (const choice of chunk.choices) {
|
||||||
|
const baseChoice = choices.find((c) => c.index === choice.index);
|
||||||
|
if (baseChoice) {
|
||||||
|
baseChoice.finish_reason = choice.finish_reason ?? baseChoice.finish_reason;
|
||||||
|
baseChoice.message = baseChoice.message ?? { role: "assistant" };
|
||||||
|
|
||||||
|
if (choice.delta?.content)
|
||||||
|
baseChoice.message.content =
|
||||||
|
((baseChoice.message.content as string) ?? "") + (choice.delta.content ?? "");
|
||||||
|
if (choice.delta?.function_call) {
|
||||||
|
const fnCall = baseChoice.message.function_call ?? {};
|
||||||
|
fnCall.name =
|
||||||
|
((fnCall.name as string) ?? "") + ((choice.delta.function_call.name as string) ?? "");
|
||||||
|
fnCall.arguments =
|
||||||
|
((fnCall.arguments as string) ?? "") +
|
||||||
|
((choice.delta.function_call.arguments as string) ?? "");
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
choices.push({ ...omit(choice, "delta"), message: { role: "assistant", ...choice.delta } });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const merged: ChatCompletion = {
|
||||||
|
...base,
|
||||||
|
choices,
|
||||||
|
};
|
||||||
|
|
||||||
|
return merged;
|
||||||
|
};
|
||||||
|
|
||||||
|
export async function getCompletion(
|
||||||
|
input: CompletionCreateParams,
|
||||||
|
onStream: ((partialOutput: ChatCompletion) => void) | null,
|
||||||
|
): Promise<CompletionResponse<ChatCompletion>> {
|
||||||
|
const start = Date.now();
|
||||||
|
let finalCompletion: ChatCompletion | null = null;
|
||||||
|
let promptTokens: number | undefined = undefined;
|
||||||
|
let completionTokens: number | undefined = undefined;
|
||||||
|
const modelName = modelProvider.getModel(input) as SupportedModel;
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (onStream) {
|
||||||
|
const resp = await openai.chat.completions.create(
|
||||||
|
{ ...input, stream: true },
|
||||||
|
{
|
||||||
|
maxRetries: 0,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
for await (const part of resp) {
|
||||||
|
finalCompletion = mergeStreamedChunks(finalCompletion, part);
|
||||||
|
onStream(finalCompletion);
|
||||||
|
}
|
||||||
|
if (!finalCompletion) {
|
||||||
|
return {
|
||||||
|
type: "error",
|
||||||
|
message: "Streaming failed to return a completion",
|
||||||
|
autoRetry: false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
promptTokens = countOpenAIChatTokens(modelName, input.messages);
|
||||||
|
completionTokens = countOpenAIChatTokens(
|
||||||
|
modelName,
|
||||||
|
finalCompletion.choices.map((c) => c.message).filter(truthyFilter),
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
// TODO handle this, library seems like maybe it doesn't work with function calls?
|
||||||
|
console.error(err);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const resp = await openai.chat.completions.create(
|
||||||
|
{ ...input, stream: false },
|
||||||
|
{
|
||||||
|
maxRetries: 0,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
finalCompletion = resp;
|
||||||
|
promptTokens = resp.usage?.prompt_tokens ?? 0;
|
||||||
|
completionTokens = resp.usage?.completion_tokens ?? 0;
|
||||||
|
}
|
||||||
|
const timeToComplete = Date.now() - start;
|
||||||
|
|
||||||
|
const { promptTokenPrice, completionTokenPrice } = frontendModelProvider.models[modelName];
|
||||||
|
let cost = undefined;
|
||||||
|
if (promptTokenPrice && completionTokenPrice && promptTokens && completionTokens) {
|
||||||
|
cost = promptTokens * promptTokenPrice + completionTokens * completionTokenPrice;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: "success",
|
||||||
|
statusCode: 200,
|
||||||
|
value: finalCompletion,
|
||||||
|
timeToComplete,
|
||||||
|
promptTokens,
|
||||||
|
completionTokens,
|
||||||
|
cost,
|
||||||
|
};
|
||||||
|
} catch (error: unknown) {
|
||||||
|
if (error instanceof APIError) {
|
||||||
|
return {
|
||||||
|
type: "error",
|
||||||
|
message: error.message,
|
||||||
|
autoRetry: error.status === 429 || error.status === 503,
|
||||||
|
statusCode: error.status,
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
console.error(error);
|
||||||
|
return {
|
||||||
|
type: "error",
|
||||||
|
message: (error as Error).message,
|
||||||
|
autoRetry: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
45
src/modelProviders/openai-ChatCompletion/index.ts
Normal file
45
src/modelProviders/openai-ChatCompletion/index.ts
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import { type JSONSchema4 } from "json-schema";
|
||||||
|
import { type ModelProvider } from "../types";
|
||||||
|
import inputSchema from "./codegen/input.schema.json";
|
||||||
|
import { type ChatCompletion, type CompletionCreateParams } from "openai/resources/chat";
|
||||||
|
import { getCompletion } from "./getCompletion";
|
||||||
|
import frontendModelProvider from "./frontend";
|
||||||
|
|
||||||
|
const supportedModels = [
|
||||||
|
"gpt-4-0613",
|
||||||
|
"gpt-4-32k-0613",
|
||||||
|
"gpt-3.5-turbo-0613",
|
||||||
|
"gpt-3.5-turbo-16k-0613",
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
export type SupportedModel = (typeof supportedModels)[number];
|
||||||
|
|
||||||
|
export type OpenaiChatModelProvider = ModelProvider<
|
||||||
|
SupportedModel,
|
||||||
|
CompletionCreateParams,
|
||||||
|
ChatCompletion
|
||||||
|
>;
|
||||||
|
|
||||||
|
const modelProvider: OpenaiChatModelProvider = {
|
||||||
|
getModel: (input) => {
|
||||||
|
if (supportedModels.includes(input.model as SupportedModel))
|
||||||
|
return input.model as SupportedModel;
|
||||||
|
|
||||||
|
const modelMaps: Record<string, SupportedModel> = {
|
||||||
|
"gpt-4": "gpt-4-0613",
|
||||||
|
"gpt-4-32k": "gpt-4-32k-0613",
|
||||||
|
"gpt-3.5-turbo": "gpt-3.5-turbo-0613",
|
||||||
|
"gpt-3.5-turbo-16k": "gpt-3.5-turbo-16k-0613",
|
||||||
|
};
|
||||||
|
|
||||||
|
if (input.model in modelMaps) return modelMaps[input.model] as SupportedModel;
|
||||||
|
|
||||||
|
return null;
|
||||||
|
},
|
||||||
|
inputSchema: inputSchema as JSONSchema4,
|
||||||
|
canStream: true,
|
||||||
|
getCompletion,
|
||||||
|
...frontendModelProvider,
|
||||||
|
};
|
||||||
|
|
||||||
|
export default modelProvider;
|
||||||
279
src/modelProviders/openai-ChatCompletion/refinementActions.ts
Normal file
279
src/modelProviders/openai-ChatCompletion/refinementActions.ts
Normal file
@@ -0,0 +1,279 @@
|
|||||||
|
import { TfiThought } from "react-icons/tfi";
|
||||||
|
import { type RefinementAction } from "../types";
|
||||||
|
import { VscJson } from "react-icons/vsc";
|
||||||
|
|
||||||
|
export const refinementActions: Record<string, RefinementAction> = {
|
||||||
|
"Add chain of thought": {
|
||||||
|
icon: VscJson,
|
||||||
|
description: "Asking the model to plan its answer can increase accuracy.",
|
||||||
|
instructions: `Adding chain of thought means asking the model to think about its answer before it gives it to you. This is useful for getting more accurate answers. Do not add an assistant message.
|
||||||
|
|
||||||
|
This is what a prompt looks like before adding chain of thought:
|
||||||
|
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-4",
|
||||||
|
stream: true,
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "system",
|
||||||
|
content: \`Evaluate sentiment.\`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: \`This is the user's message: \${scenario.user_message}. Return "positive" or "negative" or "neutral"\`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
This is what one looks like after adding chain of thought:
|
||||||
|
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-4",
|
||||||
|
stream: true,
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "system",
|
||||||
|
content: \`Evaluate sentiment.\`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: \`This is the user's message: \${scenario.user_message}. Return "positive" or "negative" or "neutral". Explain your answer before you give a score, then return the score on a new line.\`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
Here's another example:
|
||||||
|
|
||||||
|
Before:
|
||||||
|
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-3.5-turbo",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: \`Title: \${scenario.title}
|
||||||
|
Body: \${scenario.body}
|
||||||
|
|
||||||
|
Need: \${scenario.need}
|
||||||
|
|
||||||
|
Rate likelihood on 1-3 scale.\`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
temperature: 0,
|
||||||
|
functions: [
|
||||||
|
{
|
||||||
|
name: "score_post",
|
||||||
|
parameters: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
score: {
|
||||||
|
type: "number",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
function_call: {
|
||||||
|
name: "score_post",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-3.5-turbo",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: \`Title: \${scenario.title}
|
||||||
|
Body: \${scenario.body}
|
||||||
|
|
||||||
|
Need: \${scenario.need}
|
||||||
|
|
||||||
|
Rate likelihood on 1-3 scale. Provide an explanation, but always provide a score afterward.\`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
temperature: 0,
|
||||||
|
functions: [
|
||||||
|
{
|
||||||
|
name: "score_post",
|
||||||
|
parameters: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
explanation: {
|
||||||
|
type: "string",
|
||||||
|
}
|
||||||
|
score: {
|
||||||
|
type: "number",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
function_call: {
|
||||||
|
name: "score_post",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
Add chain of thought to the original prompt.`,
|
||||||
|
},
|
||||||
|
"Convert to function call": {
|
||||||
|
icon: TfiThought,
|
||||||
|
description: "Use function calls to get output from the model in a more structured way.",
|
||||||
|
instructions: `OpenAI functions are a specialized way for an LLM to return output.
|
||||||
|
|
||||||
|
This is what a prompt looks like before adding a function:
|
||||||
|
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-4",
|
||||||
|
stream: true,
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "system",
|
||||||
|
content: \`Evaluate sentiment.\`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: \`This is the user's message: \${scenario.user_message}. Return "positive" or "negative" or "neutral"\`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
This is what one looks like after adding a function:
|
||||||
|
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-4",
|
||||||
|
stream: true,
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "system",
|
||||||
|
content: "Evaluate sentiment.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: scenario.user_message,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
functions: [
|
||||||
|
{
|
||||||
|
name: "extract_sentiment",
|
||||||
|
parameters: {
|
||||||
|
type: "object", // parameters must always be an object with a properties key
|
||||||
|
properties: { // properties key is required
|
||||||
|
sentiment: {
|
||||||
|
type: "string",
|
||||||
|
description: "one of positive/negative/neutral",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
function_call: {
|
||||||
|
name: "extract_sentiment",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
Here's another example of adding a function:
|
||||||
|
|
||||||
|
Before:
|
||||||
|
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-3.5-turbo",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: \`Here is the title and body of a reddit post I am interested in:
|
||||||
|
|
||||||
|
title: \${scenario.title}
|
||||||
|
body: \${scenario.body}
|
||||||
|
|
||||||
|
On a scale from 1 to 3, how likely is it that the person writing this post has the following need? If you are not sure, make your best guess, or answer 1.
|
||||||
|
|
||||||
|
Need: \${scenario.need}
|
||||||
|
|
||||||
|
Answer one integer between 1 and 3.\`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
temperature: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-3.5-turbo",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: \`Title: \${scenario.title}
|
||||||
|
Body: \${scenario.body}
|
||||||
|
|
||||||
|
Need: \${scenario.need}
|
||||||
|
|
||||||
|
Rate likelihood on 1-3 scale.\`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
temperature: 0,
|
||||||
|
functions: [
|
||||||
|
{
|
||||||
|
name: "score_post",
|
||||||
|
parameters: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
score: {
|
||||||
|
type: "number",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
function_call: {
|
||||||
|
name: "score_post",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
Another example
|
||||||
|
|
||||||
|
Before:
|
||||||
|
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-3.5-turbo",
|
||||||
|
stream: true,
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "system",
|
||||||
|
content: \`Write 'Start experimenting!' in \${scenario.language}\`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-3.5-turbo",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "system",
|
||||||
|
content: \`Write 'Start experimenting!' in \${scenario.language}\`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
functions: [
|
||||||
|
{
|
||||||
|
name: "write_in_language",
|
||||||
|
parameters: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
text: {
|
||||||
|
type: "string",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
function_call: {
|
||||||
|
name: "write_in_language",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
Add an OpenAI function that takes one or more nested parameters that match the expected output from this prompt.`,
|
||||||
|
},
|
||||||
|
};
|
||||||
45
src/modelProviders/replicate-llama2/frontend.ts
Normal file
45
src/modelProviders/replicate-llama2/frontend.ts
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import { type SupportedModel, type ReplicateLlama2Output } from ".";
|
||||||
|
import { type FrontendModelProvider } from "../types";
|
||||||
|
import { refinementActions } from "./refinementActions";
|
||||||
|
|
||||||
|
const frontendModelProvider: FrontendModelProvider<SupportedModel, ReplicateLlama2Output> = {
|
||||||
|
name: "Replicate Llama2",
|
||||||
|
|
||||||
|
models: {
|
||||||
|
"7b-chat": {
|
||||||
|
name: "LLama 2 7B Chat",
|
||||||
|
contextWindow: 4096,
|
||||||
|
pricePerSecond: 0.0023,
|
||||||
|
speed: "fast",
|
||||||
|
provider: "replicate/llama2",
|
||||||
|
learnMoreUrl: "https://replicate.com/a16z-infra/llama7b-v2-chat",
|
||||||
|
},
|
||||||
|
"13b-chat": {
|
||||||
|
name: "LLama 2 13B Chat",
|
||||||
|
contextWindow: 4096,
|
||||||
|
pricePerSecond: 0.0023,
|
||||||
|
speed: "medium",
|
||||||
|
provider: "replicate/llama2",
|
||||||
|
learnMoreUrl: "https://replicate.com/a16z-infra/llama13b-v2-chat",
|
||||||
|
},
|
||||||
|
"70b-chat": {
|
||||||
|
name: "LLama 2 70B Chat",
|
||||||
|
contextWindow: 4096,
|
||||||
|
pricePerSecond: 0.0032,
|
||||||
|
speed: "slow",
|
||||||
|
provider: "replicate/llama2",
|
||||||
|
learnMoreUrl: "https://replicate.com/replicate/llama70b-v2-chat",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
refinementActions,
|
||||||
|
|
||||||
|
normalizeOutput: (output) => {
|
||||||
|
return {
|
||||||
|
type: "text",
|
||||||
|
value: output.join(""),
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
export default frontendModelProvider;
|
||||||
60
src/modelProviders/replicate-llama2/getCompletion.ts
Normal file
60
src/modelProviders/replicate-llama2/getCompletion.ts
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
import { env } from "~/env.mjs";
|
||||||
|
import { type ReplicateLlama2Input, type ReplicateLlama2Output } from ".";
|
||||||
|
import { type CompletionResponse } from "../types";
|
||||||
|
import Replicate from "replicate";
|
||||||
|
|
||||||
|
const replicate = new Replicate({
|
||||||
|
auth: env.REPLICATE_API_TOKEN || "",
|
||||||
|
});
|
||||||
|
|
||||||
|
const modelIds: Record<ReplicateLlama2Input["model"], string> = {
|
||||||
|
"7b-chat": "5ec5fdadd80ace49f5a2b2178cceeb9f2f77c493b85b1131002c26e6b2b13184",
|
||||||
|
"13b-chat": "6b4da803a2382c08868c5af10a523892f38e2de1aafb2ee55b020d9efef2fdb8",
|
||||||
|
"70b-chat": "2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1",
|
||||||
|
};
|
||||||
|
|
||||||
|
export async function getCompletion(
|
||||||
|
input: ReplicateLlama2Input,
|
||||||
|
onStream: ((partialOutput: string[]) => void) | null,
|
||||||
|
): Promise<CompletionResponse<ReplicateLlama2Output>> {
|
||||||
|
const start = Date.now();
|
||||||
|
|
||||||
|
const { model, ...rest } = input;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const prediction = await replicate.predictions.create({
|
||||||
|
version: modelIds[model],
|
||||||
|
input: rest,
|
||||||
|
});
|
||||||
|
|
||||||
|
const interval = onStream
|
||||||
|
? // eslint-disable-next-line @typescript-eslint/no-misused-promises
|
||||||
|
setInterval(async () => {
|
||||||
|
const partialPrediction = await replicate.predictions.get(prediction.id);
|
||||||
|
|
||||||
|
if (partialPrediction.output) onStream(partialPrediction.output as ReplicateLlama2Output);
|
||||||
|
}, 500)
|
||||||
|
: null;
|
||||||
|
|
||||||
|
const resp = await replicate.wait(prediction, {});
|
||||||
|
if (interval) clearInterval(interval);
|
||||||
|
|
||||||
|
const timeToComplete = Date.now() - start;
|
||||||
|
|
||||||
|
if (resp.error) throw new Error(resp.error as string);
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: "success",
|
||||||
|
statusCode: 200,
|
||||||
|
value: resp.output as ReplicateLlama2Output,
|
||||||
|
timeToComplete,
|
||||||
|
};
|
||||||
|
} catch (error: unknown) {
|
||||||
|
console.error("ERROR IS", error);
|
||||||
|
return {
|
||||||
|
type: "error",
|
||||||
|
message: (error as Error).message,
|
||||||
|
autoRetry: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
81
src/modelProviders/replicate-llama2/index.ts
Normal file
81
src/modelProviders/replicate-llama2/index.ts
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
import { type ModelProvider } from "../types";
|
||||||
|
import frontendModelProvider from "./frontend";
|
||||||
|
import { getCompletion } from "./getCompletion";
|
||||||
|
|
||||||
|
const supportedModels = ["7b-chat", "13b-chat", "70b-chat"] as const;
|
||||||
|
|
||||||
|
export type SupportedModel = (typeof supportedModels)[number];
|
||||||
|
|
||||||
|
export type ReplicateLlama2Input = {
|
||||||
|
model: SupportedModel;
|
||||||
|
prompt: string;
|
||||||
|
max_length?: number;
|
||||||
|
temperature?: number;
|
||||||
|
top_p?: number;
|
||||||
|
repetition_penalty?: number;
|
||||||
|
debug?: boolean;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type ReplicateLlama2Output = string[];
|
||||||
|
|
||||||
|
export type ReplicateLlama2Provider = ModelProvider<
|
||||||
|
SupportedModel,
|
||||||
|
ReplicateLlama2Input,
|
||||||
|
ReplicateLlama2Output
|
||||||
|
>;
|
||||||
|
|
||||||
|
const modelProvider: ReplicateLlama2Provider = {
|
||||||
|
getModel: (input) => {
|
||||||
|
if (supportedModels.includes(input.model)) return input.model;
|
||||||
|
|
||||||
|
return null;
|
||||||
|
},
|
||||||
|
inputSchema: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
model: {
|
||||||
|
type: "string",
|
||||||
|
enum: supportedModels as unknown as string[],
|
||||||
|
},
|
||||||
|
system_prompt: {
|
||||||
|
type: "string",
|
||||||
|
description:
|
||||||
|
"System prompt to send to Llama v2. This is prepended to the prompt and helps guide system behavior.",
|
||||||
|
},
|
||||||
|
prompt: {
|
||||||
|
type: "string",
|
||||||
|
description: "Prompt to send to Llama v2.",
|
||||||
|
},
|
||||||
|
max_new_tokens: {
|
||||||
|
type: "number",
|
||||||
|
description:
|
||||||
|
"Maximum number of tokens to generate. A word is generally 2-3 tokens (minimum: 1)",
|
||||||
|
},
|
||||||
|
temperature: {
|
||||||
|
type: "number",
|
||||||
|
description:
|
||||||
|
"Adjusts randomness of outputs, greater than 1 is random and 0 is deterministic, 0.75 is a good starting value. (minimum: 0.01; maximum: 5)",
|
||||||
|
},
|
||||||
|
top_p: {
|
||||||
|
type: "number",
|
||||||
|
description:
|
||||||
|
"When decoding text, samples from the top p percentage of most likely tokens; lower to ignore less likely tokens (minimum: 0.01; maximum: 1)",
|
||||||
|
},
|
||||||
|
repetition_penalty: {
|
||||||
|
type: "number",
|
||||||
|
description:
|
||||||
|
"Penalty for repeated words in generated text; 1 is no penalty, values greater than 1 discourage repetition, less than 1 encourage it. (minimum: 0.01; maximum: 5)",
|
||||||
|
},
|
||||||
|
debug: {
|
||||||
|
type: "boolean",
|
||||||
|
description: "provide debugging output in logs",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
required: ["model", "prompt"],
|
||||||
|
},
|
||||||
|
canStream: true,
|
||||||
|
getCompletion,
|
||||||
|
...frontendModelProvider,
|
||||||
|
};
|
||||||
|
|
||||||
|
export default modelProvider;
|
||||||
3
src/modelProviders/replicate-llama2/refinementActions.ts
Normal file
3
src/modelProviders/replicate-llama2/refinementActions.ts
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
import { type RefinementAction } from "../types";
|
||||||
|
|
||||||
|
export const refinementActions: Record<string, RefinementAction> = {};
|
||||||
71
src/modelProviders/types.ts
Normal file
71
src/modelProviders/types.ts
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
import { type JSONSchema4 } from "json-schema";
|
||||||
|
import { type IconType } from "react-icons";
|
||||||
|
import { type JsonValue } from "type-fest";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
export const ZodSupportedProvider = z.union([
|
||||||
|
z.literal("openai/ChatCompletion"),
|
||||||
|
z.literal("replicate/llama2"),
|
||||||
|
z.literal("anthropic"),
|
||||||
|
]);
|
||||||
|
|
||||||
|
export type SupportedProvider = z.infer<typeof ZodSupportedProvider>;
|
||||||
|
|
||||||
|
export type Model = {
|
||||||
|
name: string;
|
||||||
|
contextWindow: number;
|
||||||
|
promptTokenPrice?: number;
|
||||||
|
completionTokenPrice?: number;
|
||||||
|
pricePerSecond?: number;
|
||||||
|
speed: "fast" | "medium" | "slow";
|
||||||
|
provider: SupportedProvider;
|
||||||
|
description?: string;
|
||||||
|
learnMoreUrl?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type ProviderModel = { provider: z.infer<typeof ZodSupportedProvider>; model: string };
|
||||||
|
|
||||||
|
export type RefinementAction = { icon?: IconType; description: string; instructions: string };
|
||||||
|
|
||||||
|
export type FrontendModelProvider<SupportedModels extends string, OutputSchema> = {
|
||||||
|
name: string;
|
||||||
|
models: Record<SupportedModels, Model>;
|
||||||
|
refinementActions?: Record<string, RefinementAction>;
|
||||||
|
|
||||||
|
normalizeOutput: (output: OutputSchema) => NormalizedOutput;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type CompletionResponse<T> =
|
||||||
|
| { type: "error"; message: string; autoRetry: boolean; statusCode?: number }
|
||||||
|
| {
|
||||||
|
type: "success";
|
||||||
|
value: T;
|
||||||
|
timeToComplete: number;
|
||||||
|
statusCode: number;
|
||||||
|
promptTokens?: number;
|
||||||
|
completionTokens?: number;
|
||||||
|
cost?: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type ModelProvider<SupportedModels extends string, InputSchema, OutputSchema> = {
|
||||||
|
getModel: (input: InputSchema) => SupportedModels | null;
|
||||||
|
canStream: boolean;
|
||||||
|
inputSchema: JSONSchema4;
|
||||||
|
getCompletion: (
|
||||||
|
input: InputSchema,
|
||||||
|
onStream: ((partialOutput: OutputSchema) => void) | null,
|
||||||
|
) => Promise<CompletionResponse<OutputSchema>>;
|
||||||
|
|
||||||
|
// This is just a convenience for type inference, don't use it at runtime
|
||||||
|
_outputSchema?: OutputSchema | null;
|
||||||
|
} & FrontendModelProvider<SupportedModels, OutputSchema>;
|
||||||
|
|
||||||
|
export type NormalizedOutput =
|
||||||
|
| {
|
||||||
|
type: "text";
|
||||||
|
value: string;
|
||||||
|
}
|
||||||
|
| {
|
||||||
|
type: "json";
|
||||||
|
value: JsonValue;
|
||||||
|
};
|
||||||
@@ -2,11 +2,13 @@ import { type Session } from "next-auth";
|
|||||||
import { SessionProvider } from "next-auth/react";
|
import { SessionProvider } from "next-auth/react";
|
||||||
import { type AppType } from "next/app";
|
import { type AppType } from "next/app";
|
||||||
import { api } from "~/utils/api";
|
import { api } from "~/utils/api";
|
||||||
import { ChakraProvider } from "@chakra-ui/react";
|
|
||||||
import theme from "~/utils/theme";
|
|
||||||
import Favicon from "~/components/Favicon";
|
import Favicon from "~/components/Favicon";
|
||||||
import "~/utils/analytics";
|
import "~/utils/analytics";
|
||||||
import Head from "next/head";
|
import Head from "next/head";
|
||||||
|
import { ChakraThemeProvider } from "~/theme/ChakraThemeProvider";
|
||||||
|
import { SyncAppStore } from "~/state/sync";
|
||||||
|
import NextAdapterApp from "next-query-params/app";
|
||||||
|
import { QueryParamProvider } from "use-query-params";
|
||||||
|
|
||||||
const MyApp: AppType<{ session: Session | null }> = ({
|
const MyApp: AppType<{ session: Session | null }> = ({
|
||||||
Component,
|
Component,
|
||||||
@@ -19,12 +21,26 @@ const MyApp: AppType<{ session: Session | null }> = ({
|
|||||||
name="viewport"
|
name="viewport"
|
||||||
content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=0"
|
content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=0"
|
||||||
/>
|
/>
|
||||||
|
<meta name="og:title" content="OpenPipe: Open-Source Lab for LLMs" key="title" />
|
||||||
|
<meta
|
||||||
|
name="og:description"
|
||||||
|
content="OpenPipe is a powerful playground for quickly optimizing performance, cost, and speed across models."
|
||||||
|
key="description"
|
||||||
|
/>
|
||||||
|
<meta name="og:image" content="/og.png" key="og-image" />
|
||||||
|
<meta property="og:image:height" content="630" />
|
||||||
|
<meta property="og:image:width" content="1200" />
|
||||||
|
<meta name="twitter:card" content="summary_large_image" />
|
||||||
|
<meta name="twitter:image" content="/og.png" />
|
||||||
</Head>
|
</Head>
|
||||||
<SessionProvider session={session}>
|
<SessionProvider session={session}>
|
||||||
|
<SyncAppStore />
|
||||||
<Favicon />
|
<Favicon />
|
||||||
<ChakraProvider theme={theme}>
|
<ChakraThemeProvider>
|
||||||
|
<QueryParamProvider adapter={NextAdapterApp}>
|
||||||
<Component {...pageProps} />
|
<Component {...pageProps} />
|
||||||
</ChakraProvider>
|
</QueryParamProvider>
|
||||||
|
</ChakraThemeProvider>
|
||||||
</SessionProvider>
|
</SessionProvider>
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
|
|||||||
81
src/pages/api/experiments/og-image.tsx
Normal file
81
src/pages/api/experiments/og-image.tsx
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
import { ImageResponse } from "@vercel/og";
|
||||||
|
import { type NextApiRequest, type NextApiResponse } from "next";
|
||||||
|
|
||||||
|
export const config = {
|
||||||
|
runtime: "experimental-edge",
|
||||||
|
};
|
||||||
|
|
||||||
|
const inconsolataRegularFontP = fetch(
|
||||||
|
new URL("../../../../public/fonts/Inconsolata_SemiExpanded-Medium.ttf", import.meta.url),
|
||||||
|
).then((res) => res.arrayBuffer());
|
||||||
|
|
||||||
|
const OgImage = async (req: NextApiRequest, res: NextApiResponse) => {
|
||||||
|
// @ts-expect-error - nextUrl is not defined on NextApiRequest for some reason
|
||||||
|
const searchParams = req.nextUrl?.searchParams as URLSearchParams;
|
||||||
|
const experimentLabel = searchParams.get("experimentLabel");
|
||||||
|
const variantsCount = searchParams.get("variantsCount");
|
||||||
|
const scenariosCount = searchParams.get("scenariosCount");
|
||||||
|
|
||||||
|
const inconsolataRegularFont = await inconsolataRegularFontP;
|
||||||
|
|
||||||
|
return new ImageResponse(
|
||||||
|
(
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
width: "100%",
|
||||||
|
height: "100%",
|
||||||
|
display: "flex",
|
||||||
|
flexDirection: "column",
|
||||||
|
alignItems: "center",
|
||||||
|
justifyContent: "center",
|
||||||
|
fontSize: 48,
|
||||||
|
padding: "48px",
|
||||||
|
background: "white",
|
||||||
|
position: "relative",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
position: "absolute",
|
||||||
|
top: 0,
|
||||||
|
left: 0,
|
||||||
|
display: "flex",
|
||||||
|
alignItems: "center",
|
||||||
|
padding: 48,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{/* eslint-disable-next-line @next/next/no-img-element */}
|
||||||
|
<img
|
||||||
|
src="https://app.openpipe.ai/logo.svg"
|
||||||
|
alt="OpenPipe Logo"
|
||||||
|
height={100}
|
||||||
|
width={120}
|
||||||
|
/>
|
||||||
|
<div style={{ marginLeft: 24, fontSize: 64, fontFamily: "Inconsolata" }}>OpenPipe</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div style={{ display: "flex", fontSize: 72, marginTop: 108 }}>{experimentLabel}</div>
|
||||||
|
<div style={{ display: "flex", flexDirection: "column", marginTop: 36 }}>
|
||||||
|
<div style={{ display: "flex" }}>
|
||||||
|
<span style={{ width: 320 }}>Variants:</span> {variantsCount}
|
||||||
|
</div>
|
||||||
|
<div style={{ display: "flex", marginTop: 24 }}>
|
||||||
|
<span style={{ width: 320 }}>Scenarios:</span> {scenariosCount}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
),
|
||||||
|
{
|
||||||
|
fonts: [
|
||||||
|
{
|
||||||
|
name: "inconsolata",
|
||||||
|
data: inconsolataRegularFont,
|
||||||
|
style: "normal",
|
||||||
|
weight: 400,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default OgImage;
|
||||||
@@ -2,102 +2,66 @@ import {
|
|||||||
Box,
|
Box,
|
||||||
Breadcrumb,
|
Breadcrumb,
|
||||||
BreadcrumbItem,
|
BreadcrumbItem,
|
||||||
Button,
|
|
||||||
Center,
|
Center,
|
||||||
Flex,
|
Flex,
|
||||||
Icon,
|
Icon,
|
||||||
Input,
|
Input,
|
||||||
AlertDialog,
|
|
||||||
AlertDialogBody,
|
|
||||||
AlertDialogFooter,
|
|
||||||
AlertDialogHeader,
|
|
||||||
AlertDialogContent,
|
|
||||||
AlertDialogOverlay,
|
|
||||||
useDisclosure,
|
|
||||||
Text,
|
Text,
|
||||||
HStack,
|
|
||||||
VStack,
|
VStack,
|
||||||
} from "@chakra-ui/react";
|
} from "@chakra-ui/react";
|
||||||
import Link from "next/link";
|
import Link from "next/link";
|
||||||
|
|
||||||
import { useRouter } from "next/router";
|
import { useRouter } from "next/router";
|
||||||
import { useState, useEffect, useRef } from "react";
|
import { useState, useEffect } from "react";
|
||||||
import { BsGearFill, BsTrash } from "react-icons/bs";
|
|
||||||
import { RiFlaskLine } from "react-icons/ri";
|
import { RiFlaskLine } from "react-icons/ri";
|
||||||
import OutputsTable from "~/components/OutputsTable";
|
import OutputsTable from "~/components/OutputsTable";
|
||||||
import SettingsDrawer from "~/components/OutputsTable/SettingsDrawer";
|
import ExperimentSettingsDrawer from "~/components/ExperimentSettingsDrawer/ExperimentSettingsDrawer";
|
||||||
import AppShell from "~/components/nav/AppShell";
|
import AppShell from "~/components/nav/AppShell";
|
||||||
import { api } from "~/utils/api";
|
import { api } from "~/utils/api";
|
||||||
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
|
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
|
||||||
import { useAppStore } from "~/state/store";
|
import { useAppStore } from "~/state/store";
|
||||||
import { useSyncVariantEditor } from "~/state/sync";
|
import { useSyncVariantEditor } from "~/state/sync";
|
||||||
|
import { HeaderButtons } from "~/components/experiments/HeaderButtons/HeaderButtons";
|
||||||
|
import Head from "next/head";
|
||||||
|
|
||||||
const DeleteButton = () => {
|
// TODO: import less to fix deployment with server side props
|
||||||
const experiment = useExperiment();
|
// export const getServerSideProps = async (context: GetServerSidePropsContext<{ id: string }>) => {
|
||||||
const mutation = api.experiments.delete.useMutation();
|
// const experimentId = context.params?.id as string;
|
||||||
const utils = api.useContext();
|
|
||||||
const router = useRouter();
|
|
||||||
|
|
||||||
const { isOpen, onOpen, onClose } = useDisclosure();
|
// const helpers = createServerSideHelpers({
|
||||||
const cancelRef = useRef<HTMLButtonElement>(null);
|
// router: appRouter,
|
||||||
|
// ctx: createInnerTRPCContext({ session: null }),
|
||||||
|
// transformer: superjson, // optional - adds superjson serialization
|
||||||
|
// });
|
||||||
|
|
||||||
const [onDeleteConfirm] = useHandledAsyncCallback(async () => {
|
// // prefetch query
|
||||||
if (!experiment.data?.id) return;
|
// await helpers.experiments.stats.prefetch({ id: experimentId });
|
||||||
await mutation.mutateAsync({ id: experiment.data.id });
|
|
||||||
await utils.experiments.list.invalidate();
|
|
||||||
await router.push({ pathname: "/experiments" });
|
|
||||||
onClose();
|
|
||||||
}, [mutation, experiment.data?.id, router]);
|
|
||||||
|
|
||||||
return (
|
// return {
|
||||||
<>
|
// props: {
|
||||||
<Button
|
// trpcState: helpers.dehydrate(),
|
||||||
size="sm"
|
// },
|
||||||
variant={{ base: "outline", lg: "ghost" }}
|
// };
|
||||||
colorScheme="gray"
|
// };
|
||||||
fontWeight="normal"
|
|
||||||
onClick={onOpen}
|
|
||||||
>
|
|
||||||
<Icon as={BsTrash} boxSize={4} color="gray.600" />
|
|
||||||
<Text display={{ base: "none", lg: "block" }} ml={2}>
|
|
||||||
Delete Experiment
|
|
||||||
</Text>
|
|
||||||
</Button>
|
|
||||||
|
|
||||||
<AlertDialog isOpen={isOpen} leastDestructiveRef={cancelRef} onClose={onClose}>
|
|
||||||
<AlertDialogOverlay>
|
|
||||||
<AlertDialogContent>
|
|
||||||
<AlertDialogHeader fontSize="lg" fontWeight="bold">
|
|
||||||
Delete Experiment
|
|
||||||
</AlertDialogHeader>
|
|
||||||
|
|
||||||
<AlertDialogBody>
|
|
||||||
If you delete this experiment all the associated prompts and scenarios will be deleted
|
|
||||||
as well. Are you sure?
|
|
||||||
</AlertDialogBody>
|
|
||||||
|
|
||||||
<AlertDialogFooter>
|
|
||||||
<Button ref={cancelRef} onClick={onClose}>
|
|
||||||
Cancel
|
|
||||||
</Button>
|
|
||||||
<Button colorScheme="red" onClick={onDeleteConfirm} ml={3}>
|
|
||||||
Delete
|
|
||||||
</Button>
|
|
||||||
</AlertDialogFooter>
|
|
||||||
</AlertDialogContent>
|
|
||||||
</AlertDialogOverlay>
|
|
||||||
</AlertDialog>
|
|
||||||
</>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
export default function Experiment() {
|
export default function Experiment() {
|
||||||
const router = useRouter();
|
const router = useRouter();
|
||||||
const experiment = useExperiment();
|
|
||||||
const utils = api.useContext();
|
const utils = api.useContext();
|
||||||
const openDrawer = useAppStore((s) => s.openDrawer);
|
|
||||||
useSyncVariantEditor();
|
useSyncVariantEditor();
|
||||||
|
|
||||||
|
const experiment = useExperiment();
|
||||||
|
const experimentStats = api.experiments.stats.useQuery(
|
||||||
|
{ id: router.query.id as string },
|
||||||
|
{
|
||||||
|
enabled: !!router.query.id,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
const stats = experimentStats.data;
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
useAppStore.getState().sharedVariantEditor.loadMonaco().catch(console.error);
|
||||||
|
});
|
||||||
|
|
||||||
const [label, setLabel] = useState(experiment.data?.label || "");
|
const [label, setLabel] = useState(experiment.data?.label || "");
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
setLabel(experiment.data?.label || "");
|
setLabel(experiment.data?.label || "");
|
||||||
@@ -127,6 +91,17 @@ export default function Experiment() {
|
|||||||
const canModify = experiment.data?.access.canModify ?? false;
|
const canModify = experiment.data?.access.canModify ?? false;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
|
<>
|
||||||
|
{stats && (
|
||||||
|
<Head>
|
||||||
|
<meta property="og:title" content={stats.experimentLabel} key="title" />
|
||||||
|
<meta
|
||||||
|
property="og:image"
|
||||||
|
content={`/api/experiments/og-image?experimentLabel=${stats.experimentLabel}&variantsCount=${stats.promptVariantCount}&scenariosCount=${stats.testScenarioCount}`}
|
||||||
|
key="og-image"
|
||||||
|
/>
|
||||||
|
</Head>
|
||||||
|
)}
|
||||||
<AppShell title={experiment.data?.label}>
|
<AppShell title={experiment.data?.label}>
|
||||||
<VStack h="full">
|
<VStack h="full">
|
||||||
<Flex
|
<Flex
|
||||||
@@ -134,7 +109,7 @@ export default function Experiment() {
|
|||||||
py={2}
|
py={2}
|
||||||
w="full"
|
w="full"
|
||||||
direction={{ base: "column", sm: "row" }}
|
direction={{ base: "column", sm: "row" }}
|
||||||
alignItems="flex-start"
|
alignItems={{ base: "flex-start", sm: "center" }}
|
||||||
>
|
>
|
||||||
<Breadcrumb flex={1}>
|
<Breadcrumb flex={1}>
|
||||||
<BreadcrumbItem>
|
<BreadcrumbItem>
|
||||||
@@ -167,29 +142,14 @@ export default function Experiment() {
|
|||||||
)}
|
)}
|
||||||
</BreadcrumbItem>
|
</BreadcrumbItem>
|
||||||
</Breadcrumb>
|
</Breadcrumb>
|
||||||
{canModify && (
|
<HeaderButtons />
|
||||||
<HStack>
|
|
||||||
<Button
|
|
||||||
size="sm"
|
|
||||||
variant={{ base: "outline", lg: "ghost" }}
|
|
||||||
colorScheme="gray"
|
|
||||||
fontWeight="normal"
|
|
||||||
onClick={openDrawer}
|
|
||||||
>
|
|
||||||
<Icon as={BsGearFill} boxSize={4} color="gray.600" />
|
|
||||||
<Text display={{ base: "none", lg: "block" }} ml={2}>
|
|
||||||
Edit Vars & Evals
|
|
||||||
</Text>
|
|
||||||
</Button>
|
|
||||||
<DeleteButton />
|
|
||||||
</HStack>
|
|
||||||
)}
|
|
||||||
</Flex>
|
</Flex>
|
||||||
<SettingsDrawer />
|
<ExperimentSettingsDrawer />
|
||||||
<Box w="100%" overflowX="auto" flex={1}>
|
<Box w="100%" overflowX="auto" flex={1}>
|
||||||
<OutputsTable experimentId={router.query.id as string | undefined} />
|
<OutputsTable experimentId={router.query.id as string | undefined} />
|
||||||
</Box>
|
</Box>
|
||||||
</VStack>
|
</VStack>
|
||||||
</AppShell>
|
</AppShell>
|
||||||
|
</>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,18 +13,24 @@ import {
|
|||||||
import { RiFlaskLine } from "react-icons/ri";
|
import { RiFlaskLine } from "react-icons/ri";
|
||||||
import AppShell from "~/components/nav/AppShell";
|
import AppShell from "~/components/nav/AppShell";
|
||||||
import { api } from "~/utils/api";
|
import { api } from "~/utils/api";
|
||||||
import { ExperimentCard, NewExperimentCard } from "~/components/experiments/ExperimentCard";
|
import {
|
||||||
|
ExperimentCard,
|
||||||
|
ExperimentCardSkeleton,
|
||||||
|
NewExperimentCard,
|
||||||
|
} from "~/components/experiments/ExperimentCard";
|
||||||
import { signIn, useSession } from "next-auth/react";
|
import { signIn, useSession } from "next-auth/react";
|
||||||
|
|
||||||
export default function ExperimentsPage() {
|
export default function ExperimentsPage() {
|
||||||
const experiments = api.experiments.list.useQuery();
|
const experiments = api.experiments.list.useQuery();
|
||||||
|
|
||||||
const user = useSession().data;
|
const user = useSession().data;
|
||||||
|
const authLoading = useSession().status === "loading";
|
||||||
|
|
||||||
if (user === null) {
|
if (user === null || authLoading) {
|
||||||
return (
|
return (
|
||||||
<AppShell title="Experiments">
|
<AppShell title="Experiments">
|
||||||
<Center h="100%">
|
<Center h="100%">
|
||||||
|
{!authLoading && (
|
||||||
<Text>
|
<Text>
|
||||||
<Link
|
<Link
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
@@ -36,6 +42,7 @@ export default function ExperimentsPage() {
|
|||||||
</Link>{" "}
|
</Link>{" "}
|
||||||
to view or create new experiments!
|
to view or create new experiments!
|
||||||
</Text>
|
</Text>
|
||||||
|
)}
|
||||||
</Center>
|
</Center>
|
||||||
</AppShell>
|
</AppShell>
|
||||||
);
|
);
|
||||||
@@ -44,7 +51,7 @@ export default function ExperimentsPage() {
|
|||||||
return (
|
return (
|
||||||
<AppShell title="Experiments">
|
<AppShell title="Experiments">
|
||||||
<VStack alignItems={"flex-start"} px={4} py={2}>
|
<VStack alignItems={"flex-start"} px={4} py={2}>
|
||||||
<HStack minH={8} align="center">
|
<HStack minH={8} align="center" pt={2}>
|
||||||
<Breadcrumb flex={1}>
|
<Breadcrumb flex={1}>
|
||||||
<BreadcrumbItem>
|
<BreadcrumbItem>
|
||||||
<Flex alignItems="center">
|
<Flex alignItems="center">
|
||||||
@@ -55,7 +62,15 @@ export default function ExperimentsPage() {
|
|||||||
</HStack>
|
</HStack>
|
||||||
<SimpleGrid w="full" columns={{ base: 1, md: 2, lg: 3, xl: 4 }} spacing={8} p="4">
|
<SimpleGrid w="full" columns={{ base: 1, md: 2, lg: 3, xl: 4 }} spacing={8} p="4">
|
||||||
<NewExperimentCard />
|
<NewExperimentCard />
|
||||||
{experiments?.data?.map((exp) => <ExperimentCard key={exp.id} exp={exp} />)}
|
{experiments.data && !experiments.isLoading ? (
|
||||||
|
experiments?.data?.map((exp) => <ExperimentCard key={exp.id} exp={exp} />)
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<ExperimentCardSkeleton />
|
||||||
|
<ExperimentCardSkeleton />
|
||||||
|
<ExperimentCardSkeleton />
|
||||||
|
</>
|
||||||
|
)}
|
||||||
</SimpleGrid>
|
</SimpleGrid>
|
||||||
</VStack>
|
</VStack>
|
||||||
</AppShell>
|
</AppShell>
|
||||||
|
|||||||
15
src/pages/world-champs/index.tsx
Normal file
15
src/pages/world-champs/index.tsx
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { type GetServerSideProps } from "next";
|
||||||
|
|
||||||
|
// eslint-disable-next-line @typescript-eslint/require-await
|
||||||
|
export const getServerSideProps: GetServerSideProps = async () => {
|
||||||
|
return {
|
||||||
|
redirect: {
|
||||||
|
destination: "/world-champs/signup",
|
||||||
|
permanent: false,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
export default function WorldChamps() {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
201
src/pages/world-champs/signup.tsx
Normal file
201
src/pages/world-champs/signup.tsx
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
import {
|
||||||
|
Box,
|
||||||
|
type BoxProps,
|
||||||
|
Button,
|
||||||
|
DarkMode,
|
||||||
|
GlobalStyle,
|
||||||
|
HStack,
|
||||||
|
Heading,
|
||||||
|
Icon,
|
||||||
|
Link,
|
||||||
|
Table,
|
||||||
|
Tbody,
|
||||||
|
Td,
|
||||||
|
Text,
|
||||||
|
type TextProps,
|
||||||
|
Th,
|
||||||
|
Tr,
|
||||||
|
VStack,
|
||||||
|
useInterval,
|
||||||
|
} from "@chakra-ui/react";
|
||||||
|
import { signIn, useSession } from "next-auth/react";
|
||||||
|
import Head from "next/head";
|
||||||
|
import { useCallback, useState } from "react";
|
||||||
|
import { BsGithub } from "react-icons/bs";
|
||||||
|
import UserMenu from "~/components/nav/UserMenu";
|
||||||
|
import { api } from "~/utils/api";
|
||||||
|
import dayjs from "~/utils/dayjs";
|
||||||
|
import { useHandledAsyncCallback } from "~/utils/hooks";
|
||||||
|
|
||||||
|
// Shows how long until the competition starts. Refreshes every second
|
||||||
|
function CountdownTimer(props: { date: Date } & TextProps) {
|
||||||
|
const [now, setNow] = useState(dayjs(0));
|
||||||
|
|
||||||
|
useInterval(() => {
|
||||||
|
setNow(dayjs());
|
||||||
|
}, 1000);
|
||||||
|
|
||||||
|
const { date, ...rest } = props;
|
||||||
|
|
||||||
|
const kickoff = dayjs(props.date);
|
||||||
|
const diff = kickoff.diff(now, "second");
|
||||||
|
const days = Math.floor(diff / 86400);
|
||||||
|
const hours = Math.floor((diff % 86400) / 3600);
|
||||||
|
const minutes = Math.floor((diff % 3600) / 60);
|
||||||
|
const seconds = Math.floor(diff % 60);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Text {...rest}>
|
||||||
|
<Text as="span" fontWeight="bold">
|
||||||
|
Kickoff in
|
||||||
|
</Text>{" "}
|
||||||
|
{days}d {hours}h {minutes}m {seconds}s
|
||||||
|
</Text>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function ApplicationStatus(props: BoxProps) {
|
||||||
|
const user = useSession().data;
|
||||||
|
const entrant = api.worldChamps.userStatus.useQuery().data;
|
||||||
|
const applyMutation = api.worldChamps.apply.useMutation();
|
||||||
|
|
||||||
|
const utils = api.useContext();
|
||||||
|
|
||||||
|
const [onSignIn] = useHandledAsyncCallback(async () => {
|
||||||
|
await signIn("github");
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const [onApply] = useHandledAsyncCallback(async () => {
|
||||||
|
await applyMutation.mutateAsync();
|
||||||
|
await utils.worldChamps.userStatus.invalidate();
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const Wrapper = useCallback(
|
||||||
|
(wrapperProps: BoxProps) => (
|
||||||
|
<Box {...props} {...wrapperProps} minH="120px" alignItems="center" justifyItems="center" />
|
||||||
|
),
|
||||||
|
[props],
|
||||||
|
);
|
||||||
|
|
||||||
|
if (user === null) {
|
||||||
|
return (
|
||||||
|
<Wrapper>
|
||||||
|
<Button onClick={onSignIn} colorScheme="orange" leftIcon={<Icon as={BsGithub} />}>
|
||||||
|
Connect GitHub to apply
|
||||||
|
</Button>
|
||||||
|
</Wrapper>
|
||||||
|
);
|
||||||
|
} else if (user) {
|
||||||
|
return (
|
||||||
|
<Wrapper>
|
||||||
|
<HStack spacing={8}>
|
||||||
|
<UserMenu user={user} borderRadius={2} borderColor={"gray.700"} borderWidth={1} pr={6} />
|
||||||
|
<Box flex={1}>
|
||||||
|
{entrant?.approved ? (
|
||||||
|
<Text fontSize="sm">
|
||||||
|
You're accepted! We'll send you more details before August 14th.
|
||||||
|
</Text>
|
||||||
|
) : entrant ? (
|
||||||
|
<Text fontSize="sm">
|
||||||
|
Application submitted successfully! We'll notify you by email before August 14th.
|
||||||
|
</Text>
|
||||||
|
) : (
|
||||||
|
<Button onClick={onApply} colorScheme="orange">
|
||||||
|
Apply to compete
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</Box>
|
||||||
|
</HStack>
|
||||||
|
</Wrapper>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return <Wrapper />;
|
||||||
|
}
|
||||||
|
|
||||||
|
export default function Signup() {
|
||||||
|
return (
|
||||||
|
<DarkMode>
|
||||||
|
<GlobalStyle />
|
||||||
|
|
||||||
|
<Head>
|
||||||
|
<title>🏆 Prompt Engineering World Championships</title>
|
||||||
|
<meta property="og:title" content="🏆 Prompt Engineering World Championships" key="title" />
|
||||||
|
<meta
|
||||||
|
property="og:description"
|
||||||
|
content="Think you have what it takes to be the best? Compete with the world's top prompt engineers and see where you rank!"
|
||||||
|
key="description"
|
||||||
|
/>
|
||||||
|
</Head>
|
||||||
|
|
||||||
|
<Box bgColor="gray.900" color="gray.200" minH="100vh" w="full">
|
||||||
|
<VStack mx="auto" py={24} maxW="2xl" align="start" fontSize="lg">
|
||||||
|
<Heading size="lg">🏆 Prompt Engineering World Championships</Heading>
|
||||||
|
<CountdownTimer
|
||||||
|
date={new Date("2023-08-14T00:00:00Z")}
|
||||||
|
fontSize="2xl"
|
||||||
|
alignSelf="center"
|
||||||
|
color="gray.500"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<ApplicationStatus py={8} alignSelf="center" />
|
||||||
|
|
||||||
|
<Text fontSize="lg">
|
||||||
|
Think you have what it takes to be the best? Compete with the world's top prompt
|
||||||
|
engineers and see where you rank!
|
||||||
|
</Text>
|
||||||
|
|
||||||
|
<Heading size="lg" pt={12} alignSelf="left">
|
||||||
|
Event Details
|
||||||
|
</Heading>
|
||||||
|
<Table variant="simple">
|
||||||
|
<Tbody>
|
||||||
|
<Tr>
|
||||||
|
<Th>Kickoff</Th>
|
||||||
|
<Td>August 14</Td>
|
||||||
|
</Tr>
|
||||||
|
<Tr>
|
||||||
|
<Th>Prize</Th>
|
||||||
|
<Td>$15,000 grand prize + smaller category prizes.</Td>
|
||||||
|
</Tr>
|
||||||
|
<Tr>
|
||||||
|
<Th>Events</Th>
|
||||||
|
<Td>
|
||||||
|
Optimize prompts for multiple tasks selected from academic benchmarks and
|
||||||
|
real-world applications.
|
||||||
|
</Td>
|
||||||
|
</Tr>
|
||||||
|
<Tr>
|
||||||
|
<Th>Models</Th>
|
||||||
|
<Td>Separate "weight classes" for GPT 3.5, Claude Instant, and Llama 2.</Td>
|
||||||
|
</Tr>
|
||||||
|
<Tr>
|
||||||
|
<Th>Qualifications</Th>
|
||||||
|
<Td>Open to entrants with any level of experience.</Td>
|
||||||
|
</Tr>
|
||||||
|
<Tr>
|
||||||
|
<Th>Certificates</Th>
|
||||||
|
<Td>Certificate of mastery for all qualifying participants.</Td>
|
||||||
|
</Tr>
|
||||||
|
<Tr>
|
||||||
|
<Th>Cost</Th>
|
||||||
|
<Td>
|
||||||
|
<strong>Free</strong>. We'll cover your inference budget.
|
||||||
|
</Td>
|
||||||
|
</Tr>
|
||||||
|
<Tr>
|
||||||
|
<Th>Questions?</Th>
|
||||||
|
<Td>
|
||||||
|
<Link href="mailto:world-champs@openpipe.ai" textDecor="underline">
|
||||||
|
Email us
|
||||||
|
</Link>{" "}
|
||||||
|
with any follow-up questions!
|
||||||
|
</Td>
|
||||||
|
</Tr>
|
||||||
|
</Tbody>
|
||||||
|
</Table>
|
||||||
|
</VStack>
|
||||||
|
</Box>
|
||||||
|
</DarkMode>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -5,6 +5,7 @@ import { scenariosRouter } from "./routers/scenarios.router";
|
|||||||
import { scenarioVariantCellsRouter } from "./routers/scenarioVariantCells.router";
|
import { scenarioVariantCellsRouter } from "./routers/scenarioVariantCells.router";
|
||||||
import { templateVarsRouter } from "./routers/templateVariables.router";
|
import { templateVarsRouter } from "./routers/templateVariables.router";
|
||||||
import { evaluationsRouter } from "./routers/evaluations.router";
|
import { evaluationsRouter } from "./routers/evaluations.router";
|
||||||
|
import { worldChampsRouter } from "./routers/worldChamps.router";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This is the primary router for your server.
|
* This is the primary router for your server.
|
||||||
@@ -18,6 +19,7 @@ export const appRouter = createTRPCRouter({
|
|||||||
scenarioVariantCells: scenarioVariantCellsRouter,
|
scenarioVariantCells: scenarioVariantCellsRouter,
|
||||||
templateVars: templateVarsRouter,
|
templateVars: templateVarsRouter,
|
||||||
evaluations: evaluationsRouter,
|
evaluations: evaluationsRouter,
|
||||||
|
worldChamps: worldChampsRouter,
|
||||||
});
|
});
|
||||||
|
|
||||||
// export type definition of API
|
// export type definition of API
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import { EvalType } from "@prisma/client";
|
|||||||
import { z } from "zod";
|
import { z } from "zod";
|
||||||
import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
|
import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
|
||||||
import { prisma } from "~/server/db";
|
import { prisma } from "~/server/db";
|
||||||
import { runAllEvals } from "~/server/utils/evaluations";
|
import { queueRunNewEval } from "~/server/tasks/runNewEval.task";
|
||||||
import { requireCanModifyExperiment, requireCanViewExperiment } from "~/utils/accessControl";
|
import { requireCanModifyExperiment, requireCanViewExperiment } from "~/utils/accessControl";
|
||||||
|
|
||||||
export const evaluationsRouter = createTRPCRouter({
|
export const evaluationsRouter = createTRPCRouter({
|
||||||
@@ -40,9 +40,7 @@ export const evaluationsRouter = createTRPCRouter({
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
// TODO: this may be a bad UX for slow evals (eg. GPT-4 evals) Maybe need
|
await queueRunNewEval(input.experimentId);
|
||||||
// to kick off a background job or something instead
|
|
||||||
await runAllEvals(input.experimentId);
|
|
||||||
}),
|
}),
|
||||||
|
|
||||||
update: protectedProcedure
|
update: protectedProcedure
|
||||||
@@ -78,7 +76,7 @@ export const evaluationsRouter = createTRPCRouter({
|
|||||||
});
|
});
|
||||||
// Re-run all evals. Other eval results will already be cached, so this
|
// Re-run all evals. Other eval results will already be cached, so this
|
||||||
// should only re-run the updated one.
|
// should only re-run the updated one.
|
||||||
await runAllEvals(evaluation.experimentId);
|
await queueRunNewEval(experimentId);
|
||||||
}),
|
}),
|
||||||
|
|
||||||
delete: protectedProcedure
|
delete: protectedProcedure
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
import { z } from "zod";
|
import { z } from "zod";
|
||||||
|
import { v4 as uuidv4 } from "uuid";
|
||||||
import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
|
import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
|
||||||
|
import { type Prisma } from "@prisma/client";
|
||||||
import { prisma } from "~/server/db";
|
import { prisma } from "~/server/db";
|
||||||
import dedent from "dedent";
|
import dedent from "dedent";
|
||||||
import { generateNewCell } from "~/server/utils/generateNewCell";
|
import { generateNewCell } from "~/server/utils/generateNewCell";
|
||||||
@@ -10,8 +12,36 @@ import {
|
|||||||
requireNothing,
|
requireNothing,
|
||||||
} from "~/utils/accessControl";
|
} from "~/utils/accessControl";
|
||||||
import userOrg from "~/server/utils/userOrg";
|
import userOrg from "~/server/utils/userOrg";
|
||||||
|
import generateTypes from "~/modelProviders/generateTypes";
|
||||||
|
|
||||||
export const experimentsRouter = createTRPCRouter({
|
export const experimentsRouter = createTRPCRouter({
|
||||||
|
stats: publicProcedure.input(z.object({ id: z.string() })).query(async ({ input, ctx }) => {
|
||||||
|
await requireCanViewExperiment(input.id, ctx);
|
||||||
|
|
||||||
|
const [experiment, promptVariantCount, testScenarioCount] = await prisma.$transaction([
|
||||||
|
prisma.experiment.findFirstOrThrow({
|
||||||
|
where: { id: input.id },
|
||||||
|
}),
|
||||||
|
prisma.promptVariant.count({
|
||||||
|
where: {
|
||||||
|
experimentId: input.id,
|
||||||
|
visible: true,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
prisma.testScenario.count({
|
||||||
|
where: {
|
||||||
|
experimentId: input.id,
|
||||||
|
visible: true,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
experimentLabel: experiment.label,
|
||||||
|
promptVariantCount,
|
||||||
|
testScenarioCount,
|
||||||
|
};
|
||||||
|
}),
|
||||||
list: protectedProcedure.query(async ({ ctx }) => {
|
list: protectedProcedure.query(async ({ ctx }) => {
|
||||||
// Anyone can list experiments
|
// Anyone can list experiments
|
||||||
requireNothing(ctx);
|
requireNothing(ctx);
|
||||||
@@ -19,7 +49,7 @@ export const experimentsRouter = createTRPCRouter({
|
|||||||
const experiments = await prisma.experiment.findMany({
|
const experiments = await prisma.experiment.findMany({
|
||||||
where: {
|
where: {
|
||||||
organization: {
|
organization: {
|
||||||
OrganizationUser: {
|
organizationUsers: {
|
||||||
some: { userId: ctx.session.user.id },
|
some: { userId: ctx.session.user.id },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -76,6 +106,189 @@ export const experimentsRouter = createTRPCRouter({
|
|||||||
};
|
};
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
fork: protectedProcedure.input(z.object({ id: z.string() })).mutation(async ({ input, ctx }) => {
|
||||||
|
await requireCanViewExperiment(input.id, ctx);
|
||||||
|
|
||||||
|
const [
|
||||||
|
existingExp,
|
||||||
|
existingVariants,
|
||||||
|
existingScenarios,
|
||||||
|
existingCells,
|
||||||
|
evaluations,
|
||||||
|
templateVariables,
|
||||||
|
] = await prisma.$transaction([
|
||||||
|
prisma.experiment.findUniqueOrThrow({
|
||||||
|
where: {
|
||||||
|
id: input.id,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
prisma.promptVariant.findMany({
|
||||||
|
where: {
|
||||||
|
experimentId: input.id,
|
||||||
|
visible: true,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
prisma.testScenario.findMany({
|
||||||
|
where: {
|
||||||
|
experimentId: input.id,
|
||||||
|
visible: true,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
prisma.scenarioVariantCell.findMany({
|
||||||
|
where: {
|
||||||
|
testScenario: {
|
||||||
|
visible: true,
|
||||||
|
},
|
||||||
|
promptVariant: {
|
||||||
|
experimentId: input.id,
|
||||||
|
visible: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
include: {
|
||||||
|
modelResponses: {
|
||||||
|
include: {
|
||||||
|
outputEvaluations: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
prisma.evaluation.findMany({
|
||||||
|
where: {
|
||||||
|
experimentId: input.id,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
prisma.templateVariable.findMany({
|
||||||
|
where: {
|
||||||
|
experimentId: input.id,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
|
||||||
|
const newExperimentId = uuidv4();
|
||||||
|
|
||||||
|
const existingToNewVariantIds = new Map<string, string>();
|
||||||
|
const variantsToCreate: Prisma.PromptVariantCreateManyInput[] = [];
|
||||||
|
for (const variant of existingVariants) {
|
||||||
|
const newVariantId = uuidv4();
|
||||||
|
existingToNewVariantIds.set(variant.id, newVariantId);
|
||||||
|
variantsToCreate.push({
|
||||||
|
...variant,
|
||||||
|
id: newVariantId,
|
||||||
|
experimentId: newExperimentId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const existingToNewScenarioIds = new Map<string, string>();
|
||||||
|
const scenariosToCreate: Prisma.TestScenarioCreateManyInput[] = [];
|
||||||
|
for (const scenario of existingScenarios) {
|
||||||
|
const newScenarioId = uuidv4();
|
||||||
|
existingToNewScenarioIds.set(scenario.id, newScenarioId);
|
||||||
|
scenariosToCreate.push({
|
||||||
|
...scenario,
|
||||||
|
id: newScenarioId,
|
||||||
|
experimentId: newExperimentId,
|
||||||
|
variableValues: scenario.variableValues as Prisma.InputJsonValue,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const existingToNewEvaluationIds = new Map<string, string>();
|
||||||
|
const evaluationsToCreate: Prisma.EvaluationCreateManyInput[] = [];
|
||||||
|
for (const evaluation of evaluations) {
|
||||||
|
const newEvaluationId = uuidv4();
|
||||||
|
existingToNewEvaluationIds.set(evaluation.id, newEvaluationId);
|
||||||
|
evaluationsToCreate.push({
|
||||||
|
...evaluation,
|
||||||
|
id: newEvaluationId,
|
||||||
|
experimentId: newExperimentId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const cellsToCreate: Prisma.ScenarioVariantCellCreateManyInput[] = [];
|
||||||
|
const modelResponsesToCreate: Prisma.ModelResponseCreateManyInput[] = [];
|
||||||
|
const outputEvaluationsToCreate: Prisma.OutputEvaluationCreateManyInput[] = [];
|
||||||
|
for (const cell of existingCells) {
|
||||||
|
const newCellId = uuidv4();
|
||||||
|
const { modelResponses, ...cellData } = cell;
|
||||||
|
cellsToCreate.push({
|
||||||
|
...cellData,
|
||||||
|
id: newCellId,
|
||||||
|
promptVariantId: existingToNewVariantIds.get(cell.promptVariantId) ?? "",
|
||||||
|
testScenarioId: existingToNewScenarioIds.get(cell.testScenarioId) ?? "",
|
||||||
|
prompt: (cell.prompt as Prisma.InputJsonValue) ?? undefined,
|
||||||
|
});
|
||||||
|
for (const modelResponse of modelResponses) {
|
||||||
|
const newModelResponseId = uuidv4();
|
||||||
|
const { outputEvaluations, ...modelResponseData } = modelResponse;
|
||||||
|
modelResponsesToCreate.push({
|
||||||
|
...modelResponseData,
|
||||||
|
id: newModelResponseId,
|
||||||
|
scenarioVariantCellId: newCellId,
|
||||||
|
output: (modelResponse.output as Prisma.InputJsonValue) ?? undefined,
|
||||||
|
});
|
||||||
|
for (const evaluation of outputEvaluations) {
|
||||||
|
outputEvaluationsToCreate.push({
|
||||||
|
...evaluation,
|
||||||
|
id: uuidv4(),
|
||||||
|
modelResponseId: newModelResponseId,
|
||||||
|
evaluationId: existingToNewEvaluationIds.get(evaluation.evaluationId) ?? "",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const templateVariablesToCreate: Prisma.TemplateVariableCreateManyInput[] = [];
|
||||||
|
for (const templateVariable of templateVariables) {
|
||||||
|
templateVariablesToCreate.push({
|
||||||
|
...templateVariable,
|
||||||
|
id: uuidv4(),
|
||||||
|
experimentId: newExperimentId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const maxSortIndex =
|
||||||
|
(
|
||||||
|
await prisma.experiment.aggregate({
|
||||||
|
_max: {
|
||||||
|
sortIndex: true,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
)._max?.sortIndex ?? 0;
|
||||||
|
|
||||||
|
await prisma.$transaction([
|
||||||
|
prisma.experiment.create({
|
||||||
|
data: {
|
||||||
|
id: newExperimentId,
|
||||||
|
sortIndex: maxSortIndex + 1,
|
||||||
|
label: `${existingExp.label} (forked)`,
|
||||||
|
organizationId: (await userOrg(ctx.session.user.id)).id,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
prisma.promptVariant.createMany({
|
||||||
|
data: variantsToCreate,
|
||||||
|
}),
|
||||||
|
prisma.testScenario.createMany({
|
||||||
|
data: scenariosToCreate,
|
||||||
|
}),
|
||||||
|
prisma.scenarioVariantCell.createMany({
|
||||||
|
data: cellsToCreate,
|
||||||
|
}),
|
||||||
|
prisma.modelResponse.createMany({
|
||||||
|
data: modelResponsesToCreate,
|
||||||
|
}),
|
||||||
|
prisma.evaluation.createMany({
|
||||||
|
data: evaluationsToCreate,
|
||||||
|
}),
|
||||||
|
prisma.outputEvaluation.createMany({
|
||||||
|
data: outputEvaluationsToCreate,
|
||||||
|
}),
|
||||||
|
prisma.templateVariable.createMany({
|
||||||
|
data: templateVariablesToCreate,
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return newExperimentId;
|
||||||
|
}),
|
||||||
|
|
||||||
create: protectedProcedure.input(z.object({})).mutation(async ({ ctx }) => {
|
create: protectedProcedure.input(z.object({})).mutation(async ({ ctx }) => {
|
||||||
// Anyone can create an experiment
|
// Anyone can create an experiment
|
||||||
requireNothing(ctx);
|
requireNothing(ctx);
|
||||||
@@ -97,7 +310,7 @@ export const experimentsRouter = createTRPCRouter({
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
const [variant, _, scenario] = await prisma.$transaction([
|
const [variant, _, scenario1, scenario2, scenario3] = await prisma.$transaction([
|
||||||
prisma.promptVariant.create({
|
prisma.promptVariant.create({
|
||||||
data: {
|
data: {
|
||||||
experimentId: exp.id,
|
experimentId: exp.id,
|
||||||
@@ -108,43 +321,62 @@ export const experimentsRouter = createTRPCRouter({
|
|||||||
constructFn: dedent`
|
constructFn: dedent`
|
||||||
/**
|
/**
|
||||||
* Use Javascript to define an OpenAI chat completion
|
* Use Javascript to define an OpenAI chat completion
|
||||||
* (https://platform.openai.com/docs/api-reference/chat/create) and
|
* (https://platform.openai.com/docs/api-reference/chat/create).
|
||||||
* assign it to the \`prompt\` variable.
|
|
||||||
*
|
*
|
||||||
* You have access to the current scenario in the \`scenario\`
|
* You have access to the current scenario in the \`scenario\`
|
||||||
* variable.
|
* variable.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
prompt = {
|
definePrompt("openai/ChatCompletion", {
|
||||||
model: "gpt-3.5-turbo-0613",
|
model: "gpt-3.5-turbo-0613",
|
||||||
stream: true,
|
stream: true,
|
||||||
messages: [
|
messages: [
|
||||||
{
|
{
|
||||||
role: "system",
|
role: "system",
|
||||||
content: \`"Return 'this is output for the scenario "${"$"}{scenario.text}"'\`,
|
content: \`Write 'Start experimenting!' in ${"$"}{scenario.language}\`,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
};`,
|
});`,
|
||||||
model: "gpt-3.5-turbo-0613",
|
model: "gpt-3.5-turbo-0613",
|
||||||
|
modelProvider: "openai/ChatCompletion",
|
||||||
|
constructFnVersion: 2,
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
prisma.templateVariable.create({
|
prisma.templateVariable.create({
|
||||||
data: {
|
data: {
|
||||||
experimentId: exp.id,
|
experimentId: exp.id,
|
||||||
label: "text",
|
label: "language",
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
prisma.testScenario.create({
|
prisma.testScenario.create({
|
||||||
data: {
|
data: {
|
||||||
experimentId: exp.id,
|
experimentId: exp.id,
|
||||||
variableValues: {
|
variableValues: {
|
||||||
text: "This is a test scenario.",
|
language: "English",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
prisma.testScenario.create({
|
||||||
|
data: {
|
||||||
|
experimentId: exp.id,
|
||||||
|
variableValues: {
|
||||||
|
language: "Spanish",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
prisma.testScenario.create({
|
||||||
|
data: {
|
||||||
|
experimentId: exp.id,
|
||||||
|
variableValues: {
|
||||||
|
language: "German",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
await generateNewCell(variant.id, scenario.id);
|
await generateNewCell(variant.id, scenario1.id);
|
||||||
|
await generateNewCell(variant.id, scenario2.id);
|
||||||
|
await generateNewCell(variant.id, scenario3.id);
|
||||||
|
|
||||||
return exp;
|
return exp;
|
||||||
}),
|
}),
|
||||||
@@ -174,4 +406,10 @@ export const experimentsRouter = createTRPCRouter({
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
// Keeping these on `experiment` for now because we might want to limit the
|
||||||
|
// providers based on your account/experiment
|
||||||
|
promptTypes: publicProcedure.query(async () => {
|
||||||
|
return await generateTypes();
|
||||||
|
}),
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,16 +1,17 @@
|
|||||||
import { isObject } from "lodash-es";
|
|
||||||
import { z } from "zod";
|
import { z } from "zod";
|
||||||
import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
|
import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
|
||||||
import { prisma } from "~/server/db";
|
import { prisma } from "~/server/db";
|
||||||
|
import { Prisma } from "@prisma/client";
|
||||||
import { generateNewCell } from "~/server/utils/generateNewCell";
|
import { generateNewCell } from "~/server/utils/generateNewCell";
|
||||||
import { OpenAIChatModel, type SupportedModel } from "~/server/types";
|
|
||||||
import { constructPrompt } from "~/server/utils/constructPrompt";
|
|
||||||
import userError from "~/server/utils/error";
|
import userError from "~/server/utils/error";
|
||||||
import { recordExperimentUpdated } from "~/server/utils/recordExperimentUpdated";
|
import { recordExperimentUpdated } from "~/server/utils/recordExperimentUpdated";
|
||||||
import { reorderPromptVariants } from "~/server/utils/reorderPromptVariants";
|
import { reorderPromptVariants } from "~/server/utils/reorderPromptVariants";
|
||||||
import { type PromptVariant } from "@prisma/client";
|
import { type PromptVariant } from "@prisma/client";
|
||||||
import { deriveNewConstructFn } from "~/server/utils/deriveNewContructFn";
|
import { deriveNewConstructFn } from "~/server/utils/deriveNewContructFn";
|
||||||
import { requireCanModifyExperiment, requireCanViewExperiment } from "~/utils/accessControl";
|
import { requireCanModifyExperiment, requireCanViewExperiment } from "~/utils/accessControl";
|
||||||
|
import parseConstructFn from "~/server/utils/parseConstructFn";
|
||||||
|
import modelProviders from "~/modelProviders/modelProviders";
|
||||||
|
import { ZodSupportedProvider } from "~/modelProviders/types";
|
||||||
|
|
||||||
export const promptVariantsRouter = createTRPCRouter({
|
export const promptVariantsRouter = createTRPCRouter({
|
||||||
list: publicProcedure
|
list: publicProcedure
|
||||||
@@ -51,7 +52,9 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
id: true,
|
id: true,
|
||||||
},
|
},
|
||||||
where: {
|
where: {
|
||||||
modelOutput: {
|
modelResponse: {
|
||||||
|
outdated: false,
|
||||||
|
output: { not: Prisma.AnyNull },
|
||||||
scenarioVariantCell: {
|
scenarioVariantCell: {
|
||||||
promptVariant: {
|
promptVariant: {
|
||||||
id: input.variantId,
|
id: input.variantId,
|
||||||
@@ -93,14 +96,23 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
where: {
|
where: {
|
||||||
promptVariantId: input.variantId,
|
promptVariantId: input.variantId,
|
||||||
testScenario: { visible: true },
|
testScenario: { visible: true },
|
||||||
modelOutput: {
|
modelResponses: {
|
||||||
is: {},
|
some: {
|
||||||
|
outdated: false,
|
||||||
|
output: {
|
||||||
|
not: Prisma.AnyNull,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
const overallTokens = await prisma.modelOutput.aggregate({
|
const overallTokens = await prisma.modelResponse.aggregate({
|
||||||
where: {
|
where: {
|
||||||
|
outdated: false,
|
||||||
|
output: {
|
||||||
|
not: Prisma.AnyNull,
|
||||||
|
},
|
||||||
scenarioVariantCell: {
|
scenarioVariantCell: {
|
||||||
promptVariantId: input.variantId,
|
promptVariantId: input.variantId,
|
||||||
testScenario: {
|
testScenario: {
|
||||||
@@ -118,16 +130,9 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
const promptTokens = overallTokens._sum?.promptTokens ?? 0;
|
const promptTokens = overallTokens._sum?.promptTokens ?? 0;
|
||||||
const completionTokens = overallTokens._sum?.completionTokens ?? 0;
|
const completionTokens = overallTokens._sum?.completionTokens ?? 0;
|
||||||
|
|
||||||
const awaitingRetrievals = !!(await prisma.scenarioVariantCell.findFirst({
|
const awaitingEvals = !!evalResults.find(
|
||||||
where: {
|
(result) => result.totalCount < scenarioCount * evals.length,
|
||||||
promptVariantId: input.variantId,
|
);
|
||||||
testScenario: { visible: true },
|
|
||||||
// Check if is PENDING or IN_PROGRESS
|
|
||||||
retrievalStatus: {
|
|
||||||
in: ["PENDING", "IN_PROGRESS"],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}));
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
evalResults,
|
evalResults,
|
||||||
@@ -136,7 +141,7 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
overallCost: overallTokens._sum?.cost ?? 0,
|
overallCost: overallTokens._sum?.cost ?? 0,
|
||||||
scenarioCount,
|
scenarioCount,
|
||||||
outputCount,
|
outputCount,
|
||||||
awaitingRetrievals,
|
awaitingEvals,
|
||||||
};
|
};
|
||||||
}),
|
}),
|
||||||
|
|
||||||
@@ -145,7 +150,7 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
z.object({
|
z.object({
|
||||||
experimentId: z.string(),
|
experimentId: z.string(),
|
||||||
variantId: z.string().optional(),
|
variantId: z.string().optional(),
|
||||||
newModel: z.string().optional(),
|
streamScenarios: z.array(z.string()),
|
||||||
}),
|
}),
|
||||||
)
|
)
|
||||||
.mutation(async ({ input, ctx }) => {
|
.mutation(async ({ input, ctx }) => {
|
||||||
@@ -187,10 +192,7 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
? `${originalVariant?.label} Copy`
|
? `${originalVariant?.label} Copy`
|
||||||
: `Prompt Variant ${largestSortIndex + 2}`;
|
: `Prompt Variant ${largestSortIndex + 2}`;
|
||||||
|
|
||||||
const newConstructFn = await deriveNewConstructFn(
|
const newConstructFn = await deriveNewConstructFn(originalVariant);
|
||||||
originalVariant,
|
|
||||||
input.newModel as SupportedModel,
|
|
||||||
);
|
|
||||||
|
|
||||||
const createNewVariantAction = prisma.promptVariant.create({
|
const createNewVariantAction = prisma.promptVariant.create({
|
||||||
data: {
|
data: {
|
||||||
@@ -198,7 +200,9 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
label: newVariantLabel,
|
label: newVariantLabel,
|
||||||
sortIndex: (originalVariant?.sortIndex ?? 0) + 1,
|
sortIndex: (originalVariant?.sortIndex ?? 0) + 1,
|
||||||
constructFn: newConstructFn,
|
constructFn: newConstructFn,
|
||||||
|
constructFnVersion: 2,
|
||||||
model: originalVariant?.model ?? "gpt-3.5-turbo",
|
model: originalVariant?.model ?? "gpt-3.5-turbo",
|
||||||
|
modelProvider: originalVariant?.modelProvider ?? "openai/ChatCompletion",
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -220,7 +224,9 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
});
|
});
|
||||||
|
|
||||||
for (const scenario of scenarios) {
|
for (const scenario of scenarios) {
|
||||||
await generateNewCell(newVariant.id, scenario.id);
|
await generateNewCell(newVariant.id, scenario.id, {
|
||||||
|
stream: input.streamScenarios.includes(scenario.id),
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
return newVariant;
|
return newVariant;
|
||||||
@@ -283,11 +289,17 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
return updatedPromptVariant;
|
return updatedPromptVariant;
|
||||||
}),
|
}),
|
||||||
|
|
||||||
getRefinedPromptFn: protectedProcedure
|
getModifiedPromptFn: protectedProcedure
|
||||||
.input(
|
.input(
|
||||||
z.object({
|
z.object({
|
||||||
id: z.string(),
|
id: z.string(),
|
||||||
instructions: z.string(),
|
instructions: z.string().optional(),
|
||||||
|
newModel: z
|
||||||
|
.object({
|
||||||
|
provider: ZodSupportedProvider,
|
||||||
|
model: z.string(),
|
||||||
|
})
|
||||||
|
.optional(),
|
||||||
}),
|
}),
|
||||||
)
|
)
|
||||||
.mutation(async ({ input, ctx }) => {
|
.mutation(async ({ input, ctx }) => {
|
||||||
@@ -298,14 +310,17 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
});
|
});
|
||||||
await requireCanModifyExperiment(existing.experimentId, ctx);
|
await requireCanModifyExperiment(existing.experimentId, ctx);
|
||||||
|
|
||||||
const constructedPrompt = await constructPrompt({ constructFn: existing.constructFn }, null);
|
const constructedPrompt = await parseConstructFn(existing.constructFn);
|
||||||
|
|
||||||
const promptConstructionFn = await deriveNewConstructFn(
|
if ("error" in constructedPrompt) {
|
||||||
existing,
|
return userError(constructedPrompt.error);
|
||||||
// @ts-expect-error TODO clean this up
|
}
|
||||||
constructedPrompt?.model as SupportedModel,
|
|
||||||
input.instructions,
|
const model = input.newModel
|
||||||
);
|
? modelProviders[input.newModel.provider].models[input.newModel.model]
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
const promptConstructionFn = await deriveNewConstructFn(existing, model, input.instructions);
|
||||||
|
|
||||||
// TODO: Validate promptConstructionFn
|
// TODO: Validate promptConstructionFn
|
||||||
// TODO: Record in some sort of history
|
// TODO: Record in some sort of history
|
||||||
@@ -318,6 +333,7 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
z.object({
|
z.object({
|
||||||
id: z.string(),
|
id: z.string(),
|
||||||
constructFn: z.string(),
|
constructFn: z.string(),
|
||||||
|
streamScenarios: z.array(z.string()),
|
||||||
}),
|
}),
|
||||||
)
|
)
|
||||||
.mutation(async ({ input, ctx }) => {
|
.mutation(async ({ input, ctx }) => {
|
||||||
@@ -332,25 +348,10 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
throw new Error(`Prompt Variant with id ${input.id} does not exist`);
|
throw new Error(`Prompt Variant with id ${input.id} does not exist`);
|
||||||
}
|
}
|
||||||
|
|
||||||
let model = existing.model;
|
const parsedPrompt = await parseConstructFn(input.constructFn);
|
||||||
try {
|
|
||||||
const contructedPrompt = await constructPrompt({ constructFn: input.constructFn }, null);
|
|
||||||
|
|
||||||
if (!isObject(contructedPrompt)) {
|
if ("error" in parsedPrompt) {
|
||||||
return userError("Prompt is not an object");
|
return userError(parsedPrompt.error);
|
||||||
}
|
|
||||||
if (!("model" in contructedPrompt)) {
|
|
||||||
return userError("Prompt does not define a model");
|
|
||||||
}
|
|
||||||
if (
|
|
||||||
typeof contructedPrompt.model !== "string" ||
|
|
||||||
!(contructedPrompt.model in OpenAIChatModel)
|
|
||||||
) {
|
|
||||||
return userError("Prompt defines an invalid model");
|
|
||||||
}
|
|
||||||
model = contructedPrompt.model;
|
|
||||||
} catch (e) {
|
|
||||||
return userError((e as Error).message);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create a duplicate with only the config changed
|
// Create a duplicate with only the config changed
|
||||||
@@ -361,7 +362,9 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
sortIndex: existing.sortIndex,
|
sortIndex: existing.sortIndex,
|
||||||
uiId: existing.uiId,
|
uiId: existing.uiId,
|
||||||
constructFn: input.constructFn,
|
constructFn: input.constructFn,
|
||||||
model,
|
constructFnVersion: 2,
|
||||||
|
modelProvider: parsedPrompt.modelProvider,
|
||||||
|
model: parsedPrompt.model,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -388,7 +391,9 @@ export const promptVariantsRouter = createTRPCRouter({
|
|||||||
});
|
});
|
||||||
|
|
||||||
for (const scenario of scenarios) {
|
for (const scenario of scenarios) {
|
||||||
await generateNewCell(newVariant.id, scenario.id);
|
await generateNewCell(newVariant.id, scenario.id, {
|
||||||
|
stream: input.streamScenarios.includes(scenario.id),
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
return { status: "ok" } as const;
|
return { status: "ok" } as const;
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
import { z } from "zod";
|
import { z } from "zod";
|
||||||
import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
|
import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
|
||||||
import { prisma } from "~/server/db";
|
import { prisma } from "~/server/db";
|
||||||
|
import { queueQueryModel } from "~/server/tasks/queryModel.task";
|
||||||
import { generateNewCell } from "~/server/utils/generateNewCell";
|
import { generateNewCell } from "~/server/utils/generateNewCell";
|
||||||
import { queueLLMRetrievalTask } from "~/server/utils/queueLLMRetrievalTask";
|
|
||||||
import { requireCanModifyExperiment, requireCanViewExperiment } from "~/utils/accessControl";
|
import { requireCanModifyExperiment, requireCanViewExperiment } from "~/utils/accessControl";
|
||||||
|
|
||||||
export const scenarioVariantCellsRouter = createTRPCRouter({
|
export const scenarioVariantCellsRouter = createTRPCRouter({
|
||||||
@@ -19,7 +19,8 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
|
|||||||
});
|
});
|
||||||
await requireCanViewExperiment(experimentId, ctx);
|
await requireCanViewExperiment(experimentId, ctx);
|
||||||
|
|
||||||
return await prisma.scenarioVariantCell.findUnique({
|
const [cell, numTotalEvals] = await prisma.$transaction([
|
||||||
|
prisma.scenarioVariantCell.findUnique({
|
||||||
where: {
|
where: {
|
||||||
promptVariantId_testScenarioId: {
|
promptVariantId_testScenarioId: {
|
||||||
promptVariantId: input.variantId,
|
promptVariantId: input.variantId,
|
||||||
@@ -27,9 +28,12 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
include: {
|
include: {
|
||||||
modelOutput: {
|
modelResponses: {
|
||||||
|
where: {
|
||||||
|
outdated: false,
|
||||||
|
},
|
||||||
include: {
|
include: {
|
||||||
outputEvaluation: {
|
outputEvaluations: {
|
||||||
include: {
|
include: {
|
||||||
evaluation: {
|
evaluation: {
|
||||||
select: { label: true },
|
select: { label: true },
|
||||||
@@ -39,7 +43,21 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
});
|
}),
|
||||||
|
prisma.evaluation.count({
|
||||||
|
where: { experimentId },
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
|
||||||
|
if (!cell) return null;
|
||||||
|
|
||||||
|
const lastResponse = cell.modelResponses?.[cell.modelResponses?.length - 1];
|
||||||
|
const evalsComplete = lastResponse?.outputEvaluations?.length === numTotalEvals;
|
||||||
|
|
||||||
|
return {
|
||||||
|
...cell,
|
||||||
|
evalsComplete,
|
||||||
|
};
|
||||||
}),
|
}),
|
||||||
forceRefetch: protectedProcedure
|
forceRefetch: protectedProcedure
|
||||||
.input(
|
.input(
|
||||||
@@ -62,29 +80,20 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
|
|||||||
testScenarioId: input.scenarioId,
|
testScenarioId: input.scenarioId,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
include: {
|
|
||||||
modelOutput: true,
|
|
||||||
},
|
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!cell) {
|
if (!cell) {
|
||||||
await generateNewCell(input.variantId, input.scenarioId);
|
await generateNewCell(input.variantId, input.scenarioId, { stream: true });
|
||||||
return true;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (cell.modelOutput) {
|
await prisma.modelResponse.updateMany({
|
||||||
// TODO: Maybe keep these around to show previous generations?
|
where: { scenarioVariantCellId: cell.id },
|
||||||
await prisma.modelOutput.delete({
|
data: {
|
||||||
where: { id: cell.modelOutput.id },
|
outdated: true,
|
||||||
});
|
},
|
||||||
}
|
|
||||||
|
|
||||||
await prisma.scenarioVariantCell.update({
|
|
||||||
where: { id: cell.id },
|
|
||||||
data: { retrievalStatus: "PENDING" },
|
|
||||||
});
|
});
|
||||||
|
|
||||||
await queueLLMRetrievalTask(cell.id);
|
await queueQueryModel(cell.id, true);
|
||||||
return true;
|
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user