Compare commits
16 commits, comparing scenario-s... with world-cham...
| Author | SHA1 | Date |
|---|---|---|
|  | 156f248c3a |  |
|  | 65a76cddc5 |  |
|  | c88266bcd4 |  |
|  | 1bf9554eca |  |
|  | 1fb428ef4a |  |
|  | 6316eaae6d |  |
|  | 8513924ea5 |  |
|  | 51d64baae9 |  |
|  | 26b6fa4f0c |  |
|  | 807665fdc1 |  |
|  | d6597d2c8a |  |
|  | 566d67bf48 |  |
|  | d4fb8b689a |  |
|  | 98b231c8bd |  |
|  | 45afb1f1f4 |  |
|  | 223b990005 |  |
@@ -37,6 +37,7 @@ const config = {
       "warn",
       { vars: "all", varsIgnorePattern: "^_", args: "after-used", argsIgnorePattern: "^_" },
     ],
+    "react/no-unescaped-entities": "off",
   },
 };
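For context on the new rule override: `react/no-unescaped-entities` flags raw apostrophes and quotes inside JSX text. A minimal, hypothetical example of the kind of markup that no longer needs HTML entities with the rule set to "off":

```tsx
// Hypothetical component, not from the repo. With the rule enabled, the raw
// apostrophe below would have to be written as &apos;.
export const Note = () => <p>Don't worry about escaping this apostrophe.</p>;
```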
@@ -1,2 +1,2 @@
-src/codegen/openai.schema.json
+*.schema.json
 pnpm-lock.yaml
5  @types/nextjs-routes.d.ts  (vendored)
@@ -13,10 +13,13 @@ declare module "nextjs-routes" {
   export type Route =
     | StaticRoute<"/account/signin">
     | DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
+    | StaticRoute<"/api/experiments/og-image">
     | DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>
     | DynamicRoute<"/experiments/[id]", { "id": string }>
     | StaticRoute<"/experiments">
-    | StaticRoute<"/">;
+    | StaticRoute<"/">
+    | StaticRoute<"/world-champs">
+    | StaticRoute<"/world-champs/signup">;

   interface StaticRoute<Pathname> {
     pathname: Pathname;
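The regenerated `Route` union above means links to the new pages are checked at compile time. A rough sketch (illustrative component, not from the repo), assuming nextjs-routes' usual `next/link` type augmentation:

```tsx
import Link from "next/link";

// A typo such as "/world-champs/signup" would fail type checking against the Route union.
export const WorldChampsSignupLink = () => (
  <Link href={{ pathname: "/world-champs/signup" }}>Sign up for the world champs</Link>
);
```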
@@ -45,6 +45,7 @@ Natively supports [OpenAI function calls](https://openai.com/blog/function-calli

 - All models available through the OpenAI [chat completion API](https://platform.openai.com/docs/guides/gpt/chat-completions-api)
 - Llama2 [7b chat](https://replicate.com/a16z-infra/llama7b-v2-chat), [13b chat](https://replicate.com/a16z-infra/llama13b-v2-chat), [70b chat](https://replicate.com/replicate/llama70b-v2-chat).
+- Anthropic's [Claude 1 Instant](https://www.anthropic.com/index/introducing-claude) and [Claude 2](https://www.anthropic.com/index/claude-2)

 ## Running Locally
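Claude support arrives via `@anthropic-ai/sdk` (added to `package.json` below). As a rough sketch of what the 0.5.x SDK's completion API looks like, with an illustrative prompt rather than this repo's actual integration code:

```ts
import Anthropic from "@anthropic-ai/sdk";

// Sketch only: the client reads ANTHROPIC_API_KEY from the environment by default,
// and the model name and prompt text here are placeholders.
const anthropic = new Anthropic();

const completion = await anthropic.completions.create({
  model: "claude-2",
  max_tokens_to_sample: 256,
  prompt: `${Anthropic.HUMAN_PROMPT} Classify the sentiment of this tweet. ${Anthropic.AI_PROMPT}`,
});

console.log(completion.completion);
```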
@@ -21,6 +21,7 @@
     "check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'"
   },
   "dependencies": {
+    "@anthropic-ai/sdk": "^0.5.8",
     "@apidevtools/json-schema-ref-parser": "^10.1.0",
     "@babel/preset-typescript": "^7.22.5",
     "@babel/standalone": "^7.22.9",
@@ -40,6 +41,7 @@
     "@trpc/next": "^10.26.0",
     "@trpc/react-query": "^10.26.0",
     "@trpc/server": "^10.26.0",
+    "@vercel/og": "^0.5.9",
     "ast-types": "^0.14.2",
     "chroma-js": "^2.4.2",
     "concurrently": "^8.2.0",
@@ -105,6 +107,7 @@
     "@types/uuid": "^9.0.2",
     "@typescript-eslint/eslint-plugin": "^5.59.6",
     "@typescript-eslint/parser": "^5.59.6",
+    "csv-parse": "^5.4.0",
     "eslint": "^8.40.0",
     "eslint-config-next": "^13.4.2",
     "eslint-plugin-unused-imports": "^2.0.0",
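`@vercel/og` presumably backs the new `/api/experiments/og-image` route seen in the route types earlier. A minimal sketch of such an edge route (placeholder markup, not the repo's real implementation):

```tsx
import { ImageResponse } from "@vercel/og";

// Next.js pages-router edge function; @vercel/og renders the JSX below to an image.
export const config = { runtime: "edge" };

export default function handler() {
  return new ImageResponse(
    (
      <div
        style={{
          display: "flex",
          width: "100%",
          height: "100%",
          alignItems: "center",
          justifyContent: "center",
          fontSize: 64,
        }}
      >
        Experiment results
      </div>
    ),
    { width: 1200, height: 630 },
  );
}
```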
154  pnpm-lock.yaml  (generated)
@@ -1,10 +1,13 @@
-lockfileVersion: '6.0'
+lockfileVersion: '6.1'

 settings:
   autoInstallPeers: true
   excludeLinksFromLockfile: false

 dependencies:
+  '@anthropic-ai/sdk':
+    specifier: ^0.5.8
+    version: 0.5.8
   '@apidevtools/json-schema-ref-parser':
     specifier: ^10.1.0
     version: 10.1.0
@@ -62,6 +65,9 @@ dependencies:
|
|||||||
'@trpc/server':
|
'@trpc/server':
|
||||||
specifier: ^10.26.0
|
specifier: ^10.26.0
|
||||||
version: 10.26.0
|
version: 10.26.0
|
||||||
|
'@vercel/og':
|
||||||
|
specifier: ^0.5.9
|
||||||
|
version: 0.5.9
|
||||||
ast-types:
|
ast-types:
|
||||||
specifier: ^0.14.2
|
specifier: ^0.14.2
|
||||||
version: 0.14.2
|
version: 0.14.2
|
||||||
@@ -253,6 +259,9 @@ devDependencies:
|
|||||||
'@typescript-eslint/parser':
|
'@typescript-eslint/parser':
|
||||||
specifier: ^5.59.6
|
specifier: ^5.59.6
|
||||||
version: 5.59.6(eslint@8.40.0)(typescript@5.0.4)
|
version: 5.59.6(eslint@8.40.0)(typescript@5.0.4)
|
||||||
|
csv-parse:
|
||||||
|
specifier: ^5.4.0
|
||||||
|
version: 5.4.0
|
||||||
eslint:
|
eslint:
|
||||||
specifier: ^8.40.0
|
specifier: ^8.40.0
|
||||||
version: 8.40.0
|
version: 8.40.0
|
||||||
@@ -298,6 +307,22 @@ packages:
|
|||||||
'@jridgewell/gen-mapping': 0.3.3
|
'@jridgewell/gen-mapping': 0.3.3
|
||||||
'@jridgewell/trace-mapping': 0.3.18
|
'@jridgewell/trace-mapping': 0.3.18
|
||||||
|
|
||||||
|
/@anthropic-ai/sdk@0.5.8:
|
||||||
|
resolution: {integrity: sha512-iHenjcE2Q/az6VZiP1DueOSvKNRmxsly6Rx2yjJBoy7OBYVFGVjEdgs2mPQHtTX0ibKAR7tPq6F6MQbKDPWcKg==}
|
||||||
|
dependencies:
|
||||||
|
'@types/node': 18.16.0
|
||||||
|
'@types/node-fetch': 2.6.4
|
||||||
|
abort-controller: 3.0.0
|
||||||
|
agentkeepalive: 4.3.0
|
||||||
|
digest-fetch: 1.3.0
|
||||||
|
form-data-encoder: 1.7.2
|
||||||
|
formdata-node: 4.4.1
|
||||||
|
node-fetch: 2.6.12
|
||||||
|
transitivePeerDependencies:
|
||||||
|
- encoding
|
||||||
|
- supports-color
|
||||||
|
dev: false
|
||||||
|
|
||||||
/@apidevtools/json-schema-ref-parser@10.1.0:
|
/@apidevtools/json-schema-ref-parser@10.1.0:
|
||||||
resolution: {integrity: sha512-3e+viyMuXdrcK8v5pvP+SDoAQ77FH6OyRmuK48SZKmdHJRFm87RsSs8qm6kP39a/pOPURByJw+OXzQIqcfmKtA==}
|
resolution: {integrity: sha512-3e+viyMuXdrcK8v5pvP+SDoAQ77FH6OyRmuK48SZKmdHJRFm87RsSs8qm6kP39a/pOPURByJw+OXzQIqcfmKtA==}
|
||||||
engines: {node: '>= 16'}
|
engines: {node: '>= 16'}
|
||||||
@@ -2631,10 +2656,24 @@ packages:
|
|||||||
resolution: {integrity: sha512-PDNlhP/1vyTgmNyiucGqGCdXIp7HIkkvKO50si3y3PcceeHvqtiKPaH1iJdz63jCWMVMbj2MElSxXPOeBvEVIQ==}
|
resolution: {integrity: sha512-PDNlhP/1vyTgmNyiucGqGCdXIp7HIkkvKO50si3y3PcceeHvqtiKPaH1iJdz63jCWMVMbj2MElSxXPOeBvEVIQ==}
|
||||||
requiresBuild: true
|
requiresBuild: true
|
||||||
|
|
||||||
|
/@resvg/resvg-wasm@2.4.1:
|
||||||
|
resolution: {integrity: sha512-yi6R0HyHtsoWTRA06Col4WoDs7SvlXU3DLMNP2bdAgs7HK18dTEVl1weXgxRzi8gwLteGUbIg29zulxIB3GSdg==}
|
||||||
|
engines: {node: '>= 10'}
|
||||||
|
dev: false
|
||||||
|
|
||||||
/@rushstack/eslint-patch@1.3.2:
|
/@rushstack/eslint-patch@1.3.2:
|
||||||
resolution: {integrity: sha512-V+MvGwaHH03hYhY+k6Ef/xKd6RYlc4q8WBx+2ANmipHJcKuktNcI/NgEsJgdSUF6Lw32njT6OnrRsKYCdgHjYw==}
|
resolution: {integrity: sha512-V+MvGwaHH03hYhY+k6Ef/xKd6RYlc4q8WBx+2ANmipHJcKuktNcI/NgEsJgdSUF6Lw32njT6OnrRsKYCdgHjYw==}
|
||||||
dev: true
|
dev: true
|
||||||
|
|
||||||
|
/@shuding/opentype.js@1.4.0-beta.0:
|
||||||
|
resolution: {integrity: sha512-3NgmNyH3l/Hv6EvsWJbsvpcpUba6R8IREQ83nH83cyakCw7uM1arZKNfHwv1Wz6jgqrF/j4x5ELvR6PnK9nTcA==}
|
||||||
|
engines: {node: '>= 8.0.0'}
|
||||||
|
hasBin: true
|
||||||
|
dependencies:
|
||||||
|
fflate: 0.7.4
|
||||||
|
string.prototype.codepointat: 0.2.1
|
||||||
|
dev: false
|
||||||
|
|
||||||
/@sinclair/typebox@0.27.8:
|
/@sinclair/typebox@0.27.8:
|
||||||
resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==}
|
resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==}
|
||||||
dev: true
|
dev: true
|
||||||
@@ -3164,6 +3203,15 @@ packages:
|
|||||||
eslint-visitor-keys: 3.4.1
|
eslint-visitor-keys: 3.4.1
|
||||||
dev: true
|
dev: true
|
||||||
|
|
||||||
|
/@vercel/og@0.5.9:
|
||||||
|
resolution: {integrity: sha512-CtjaV/BVHtNCjRtxGqn8Q6AKFLqcG34Byxr91+mY+4eqyp/09LVe9jEeY9WXjbaKvu8syWPMteTpY+YQUQYzSg==}
|
||||||
|
engines: {node: '>=16'}
|
||||||
|
dependencies:
|
||||||
|
'@resvg/resvg-wasm': 2.4.1
|
||||||
|
satori: 0.10.1
|
||||||
|
yoga-wasm-web: 0.3.3
|
||||||
|
dev: false
|
||||||
|
|
||||||
/@vitest/expect@0.33.0:
|
/@vitest/expect@0.33.0:
|
||||||
resolution: {integrity: sha512-sVNf+Gla3mhTCxNJx+wJLDPp/WcstOe0Ksqz4Vec51MmgMth/ia0MGFEkIZmVGeTL5HtjYR4Wl/ZxBxBXZJTzQ==}
|
resolution: {integrity: sha512-sVNf+Gla3mhTCxNJx+wJLDPp/WcstOe0Ksqz4Vec51MmgMth/ia0MGFEkIZmVGeTL5HtjYR4Wl/ZxBxBXZJTzQ==}
|
||||||
dependencies:
|
dependencies:
|
||||||
@@ -3610,6 +3658,11 @@ packages:
|
|||||||
resolution: {integrity: sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA==}
|
resolution: {integrity: sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA==}
|
||||||
dev: false
|
dev: false
|
||||||
|
|
||||||
|
/base64-js@0.0.8:
|
||||||
|
resolution: {integrity: sha512-3XSA2cR/h/73EzlXXdU6YNycmYI7+kicTxks4eJg2g39biHR84slg2+des+p7iHYhbRg/udIS4TD53WabcOUkw==}
|
||||||
|
engines: {node: '>= 0.4'}
|
||||||
|
dev: false
|
||||||
|
|
||||||
/base64-js@1.5.1:
|
/base64-js@1.5.1:
|
||||||
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
|
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
|
||||||
dev: false
|
dev: false
|
||||||
@@ -3731,6 +3784,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
|
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
|
||||||
engines: {node: '>=6'}
|
engines: {node: '>=6'}
|
||||||
|
|
||||||
|
/camelize@1.0.1:
|
||||||
|
resolution: {integrity: sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ==}
|
||||||
|
dev: false
|
||||||
|
|
||||||
/caniuse-lite@1.0.30001517:
|
/caniuse-lite@1.0.30001517:
|
||||||
resolution: {integrity: sha512-Vdhm5S11DaFVLlyiKu4hiUTkpZu+y1KA/rZZqVQfOD5YdDT/eQKlkt7NaE0WGOFgX32diqt9MiP9CAiFeRklaA==}
|
resolution: {integrity: sha512-Vdhm5S11DaFVLlyiKu4hiUTkpZu+y1KA/rZZqVQfOD5YdDT/eQKlkt7NaE0WGOFgX32diqt9MiP9CAiFeRklaA==}
|
||||||
|
|
||||||
@@ -3999,12 +4056,33 @@ packages:
|
|||||||
resolution: {integrity: sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==}
|
resolution: {integrity: sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==}
|
||||||
dev: false
|
dev: false
|
||||||
|
|
||||||
|
/css-background-parser@0.1.0:
|
||||||
|
resolution: {integrity: sha512-2EZLisiZQ+7m4wwur/qiYJRniHX4K5Tc9w93MT3AS0WS1u5kaZ4FKXlOTBhOjc+CgEgPiGY+fX1yWD8UwpEqUA==}
|
||||||
|
dev: false
|
||||||
|
|
||||||
/css-box-model@1.2.1:
|
/css-box-model@1.2.1:
|
||||||
resolution: {integrity: sha512-a7Vr4Q/kd/aw96bnJG332W9V9LkJO69JRcaCYDUqjp6/z0w6VcZjgAcTbgFxEPfBgdnAwlh3iwu+hLopa+flJw==}
|
resolution: {integrity: sha512-a7Vr4Q/kd/aw96bnJG332W9V9LkJO69JRcaCYDUqjp6/z0w6VcZjgAcTbgFxEPfBgdnAwlh3iwu+hLopa+flJw==}
|
||||||
dependencies:
|
dependencies:
|
||||||
tiny-invariant: 1.3.1
|
tiny-invariant: 1.3.1
|
||||||
dev: false
|
dev: false
|
||||||
|
|
||||||
|
/css-box-shadow@1.0.0-3:
|
||||||
|
resolution: {integrity: sha512-9jaqR6e7Ohds+aWwmhe6wILJ99xYQbfmK9QQB9CcMjDbTxPZjwEmUQpU91OG05Xgm8BahT5fW+svbsQGjS/zPg==}
|
||||||
|
dev: false
|
||||||
|
|
||||||
|
/css-color-keywords@1.0.0:
|
||||||
|
resolution: {integrity: sha512-FyyrDHZKEjXDpNJYvVsV960FiqQyXc/LlYmsxl2BcdMb2WPx0OGRVgTg55rPSyLSNMqP52R9r8geSp7apN3Ofg==}
|
||||||
|
engines: {node: '>=4'}
|
||||||
|
dev: false
|
||||||
|
|
||||||
|
/css-to-react-native@3.2.0:
|
||||||
|
resolution: {integrity: sha512-e8RKaLXMOFii+02mOlqwjbD00KSEKqblnpO9e++1aXS1fPQOpS1YoqdVHBqPjHNoxeF2mimzVqawm2KCbEdtHQ==}
|
||||||
|
dependencies:
|
||||||
|
camelize: 1.0.1
|
||||||
|
css-color-keywords: 1.0.0
|
||||||
|
postcss-value-parser: 4.2.0
|
||||||
|
dev: false
|
||||||
|
|
||||||
/csstype@2.6.21:
|
/csstype@2.6.21:
|
||||||
resolution: {integrity: sha512-Z1PhmomIfypOpoMjRQB70jfvy/wxT50qW08YXO5lMIJkrdq4yOTR+AW7FqutScmB9NkLwxo+jU+kZLbofZZq/w==}
|
resolution: {integrity: sha512-Z1PhmomIfypOpoMjRQB70jfvy/wxT50qW08YXO5lMIJkrdq4yOTR+AW7FqutScmB9NkLwxo+jU+kZLbofZZq/w==}
|
||||||
dev: false
|
dev: false
|
||||||
@@ -4012,6 +4090,10 @@ packages:
|
|||||||
/csstype@3.1.2:
|
/csstype@3.1.2:
|
||||||
resolution: {integrity: sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==}
|
resolution: {integrity: sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==}
|
||||||
|
|
||||||
|
/csv-parse@5.4.0:
|
||||||
|
resolution: {integrity: sha512-JiQosUWiOFgp4hQn0an+SBoV9IKdqzhROM0iiN4LB7UpfJBlsSJlWl9nq4zGgxgMAzHJ6V4t29VAVD+3+2NJAg==}
|
||||||
|
dev: true
|
||||||
|
|
||||||
/d@1.0.1:
|
/d@1.0.1:
|
||||||
resolution: {integrity: sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==}
|
resolution: {integrity: sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==}
|
||||||
dependencies:
|
dependencies:
|
||||||
@@ -4205,6 +4287,10 @@ packages:
|
|||||||
/electron-to-chromium@1.4.465:
|
/electron-to-chromium@1.4.465:
|
||||||
resolution: {integrity: sha512-XQcuHvEJRMU97UJ75e170mgcITZoz0lIyiaVjk6R+NMTJ8KBIvUHYd1779swgOppUlzxR+JsLpq59PumaXS1jQ==}
|
resolution: {integrity: sha512-XQcuHvEJRMU97UJ75e170mgcITZoz0lIyiaVjk6R+NMTJ8KBIvUHYd1779swgOppUlzxR+JsLpq59PumaXS1jQ==}
|
||||||
|
|
||||||
|
/emoji-regex@10.2.1:
|
||||||
|
resolution: {integrity: sha512-97g6QgOk8zlDRdgq1WxwgTMgEWGVAQvB5Fdpgc1MkNy56la5SKP9GsMXKDOdqwn90/41a8yPwIGk1Y6WVbeMQA==}
|
||||||
|
dev: false
|
||||||
|
|
||||||
/emoji-regex@8.0.0:
|
/emoji-regex@8.0.0:
|
||||||
resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
|
resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
|
||||||
dev: false
|
dev: false
|
||||||
@@ -4920,6 +5006,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==}
|
resolution: {integrity: sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==}
|
||||||
dev: false
|
dev: false
|
||||||
|
|
||||||
|
/fflate@0.7.4:
|
||||||
|
resolution: {integrity: sha512-5u2V/CDW15QM1XbbgS+0DfPxVB+jUKhWEKuuFuHncbk3tEEqzmoXL+2KyOFuKGqOnmdIy0/davWF1CkuwtibCw==}
|
||||||
|
dev: false
|
||||||
|
|
||||||
/file-entry-cache@6.0.1:
|
/file-entry-cache@6.0.1:
|
||||||
resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==}
|
resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==}
|
||||||
engines: {node: ^10.12.0 || >=12.0.0}
|
engines: {node: ^10.12.0 || >=12.0.0}
|
||||||
@@ -5305,6 +5395,11 @@ packages:
|
|||||||
space-separated-tokens: 1.1.5
|
space-separated-tokens: 1.1.5
|
||||||
dev: false
|
dev: false
|
||||||
|
|
||||||
|
/hex-rgb@4.3.0:
|
||||||
|
resolution: {integrity: sha512-Ox1pJVrDCyGHMG9CFg1tmrRUMRPRsAWYc/PinY0XzJU4K7y7vjNoLKIQ7BR5UJMCxNN8EM1MNDmHWA/B3aZUuw==}
|
||||||
|
engines: {node: '>=6'}
|
||||||
|
dev: false
|
||||||
|
|
||||||
/highlight.js@10.7.3:
|
/highlight.js@10.7.3:
|
||||||
resolution: {integrity: sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==}
|
resolution: {integrity: sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==}
|
||||||
dev: false
|
dev: false
|
||||||
@@ -5770,6 +5865,13 @@ packages:
|
|||||||
type-check: 0.4.0
|
type-check: 0.4.0
|
||||||
dev: true
|
dev: true
|
||||||
|
|
||||||
|
/linebreak@1.1.0:
|
||||||
|
resolution: {integrity: sha512-MHp03UImeVhB7XZtjd0E4n6+3xr5Dq/9xI/5FptGk5FrbDR3zagPa2DS6U8ks/3HjbKWG9Q1M2ufOzxV2qLYSQ==}
|
||||||
|
dependencies:
|
||||||
|
base64-js: 0.0.8
|
||||||
|
unicode-trie: 2.0.0
|
||||||
|
dev: false
|
||||||
|
|
||||||
/lines-and-columns@1.2.4:
|
/lines-and-columns@1.2.4:
|
||||||
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
|
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
|
||||||
dev: false
|
dev: false
|
||||||
@@ -6374,12 +6476,23 @@ packages:
|
|||||||
resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==}
|
resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==}
|
||||||
dev: false
|
dev: false
|
||||||
|
|
||||||
|
/pako@0.2.9:
|
||||||
|
resolution: {integrity: sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA==}
|
||||||
|
dev: false
|
||||||
|
|
||||||
/parent-module@1.0.1:
|
/parent-module@1.0.1:
|
||||||
resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==}
|
resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==}
|
||||||
engines: {node: '>=6'}
|
engines: {node: '>=6'}
|
||||||
dependencies:
|
dependencies:
|
||||||
callsites: 3.1.0
|
callsites: 3.1.0
|
||||||
|
|
||||||
|
/parse-css-color@0.2.1:
|
||||||
|
resolution: {integrity: sha512-bwS/GGIFV3b6KS4uwpzCFj4w297Yl3uqnSgIPsoQkx7GMLROXfMnWvxfNkL0oh8HVhZA4hvJoEoEIqonfJ3BWg==}
|
||||||
|
dependencies:
|
||||||
|
color-name: 1.1.4
|
||||||
|
hex-rgb: 4.3.0
|
||||||
|
dev: false
|
||||||
|
|
||||||
/parse-entities@2.0.0:
|
/parse-entities@2.0.0:
|
||||||
resolution: {integrity: sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==}
|
resolution: {integrity: sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==}
|
||||||
dependencies:
|
dependencies:
|
||||||
@@ -6546,6 +6659,10 @@ packages:
|
|||||||
engines: {node: '>=4'}
|
engines: {node: '>=4'}
|
||||||
dev: false
|
dev: false
|
||||||
|
|
||||||
|
/postcss-value-parser@4.2.0:
|
||||||
|
resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==}
|
||||||
|
dev: false
|
||||||
|
|
||||||
/postcss@8.4.14:
|
/postcss@8.4.14:
|
||||||
resolution: {integrity: sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==}
|
resolution: {integrity: sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==}
|
||||||
engines: {node: ^10 || ^12 || >=14}
|
engines: {node: ^10 || ^12 || >=14}
|
||||||
@@ -7122,6 +7239,22 @@ packages:
|
|||||||
resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
|
resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
|
||||||
dev: false
|
dev: false
|
||||||
|
|
||||||
|
/satori@0.10.1:
|
||||||
|
resolution: {integrity: sha512-F4bTCkDp931tLb7+UCNPBuSQwXhikrUkI4fBQo6fA8lF0Evqqgg3nDyUpRktQpR5Ry1DIiIVqLyEwkAms87ykg==}
|
||||||
|
engines: {node: '>=16'}
|
||||||
|
dependencies:
|
||||||
|
'@shuding/opentype.js': 1.4.0-beta.0
|
||||||
|
css-background-parser: 0.1.0
|
||||||
|
css-box-shadow: 1.0.0-3
|
||||||
|
css-to-react-native: 3.2.0
|
||||||
|
emoji-regex: 10.2.1
|
||||||
|
escape-html: 1.0.3
|
||||||
|
linebreak: 1.1.0
|
||||||
|
parse-css-color: 0.2.1
|
||||||
|
postcss-value-parser: 4.2.0
|
||||||
|
yoga-wasm-web: 0.3.3
|
||||||
|
dev: false
|
||||||
|
|
||||||
/scheduler@0.23.0:
|
/scheduler@0.23.0:
|
||||||
resolution: {integrity: sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==}
|
resolution: {integrity: sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==}
|
||||||
dependencies:
|
dependencies:
|
||||||
@@ -7349,6 +7482,10 @@ packages:
|
|||||||
strip-ansi: 6.0.1
|
strip-ansi: 6.0.1
|
||||||
dev: false
|
dev: false
|
||||||
|
|
||||||
|
/string.prototype.codepointat@0.2.1:
|
||||||
|
resolution: {integrity: sha512-2cBVCj6I4IOvEnjgO/hWqXjqBGsY+zwPmHl12Srk9IXSZ56Jwwmy+66XO5Iut/oQVR7t5ihYdLB0GMa4alEUcg==}
|
||||||
|
dev: false
|
||||||
|
|
||||||
/string.prototype.matchall@4.0.8:
|
/string.prototype.matchall@4.0.8:
|
||||||
resolution: {integrity: sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg==}
|
resolution: {integrity: sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg==}
|
||||||
dependencies:
|
dependencies:
|
||||||
@@ -7575,6 +7712,10 @@ packages:
|
|||||||
globrex: 0.1.2
|
globrex: 0.1.2
|
||||||
dev: true
|
dev: true
|
||||||
|
|
||||||
|
/tiny-inflate@1.0.3:
|
||||||
|
resolution: {integrity: sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw==}
|
||||||
|
dev: false
|
||||||
|
|
||||||
/tiny-invariant@1.3.1:
|
/tiny-invariant@1.3.1:
|
||||||
resolution: {integrity: sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw==}
|
resolution: {integrity: sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw==}
|
||||||
dev: false
|
dev: false
|
||||||
@@ -7781,6 +7922,13 @@ packages:
|
|||||||
busboy: 1.6.0
|
busboy: 1.6.0
|
||||||
dev: true
|
dev: true
|
||||||
|
|
||||||
|
/unicode-trie@2.0.0:
|
||||||
|
resolution: {integrity: sha512-x7bc76x0bm4prf1VLg79uhAzKw8DVboClSN5VxJuQ+LKDOVEW9CdH+VY7SP+vX7xCYQqzzgQpFqz15zeLvAtZQ==}
|
||||||
|
dependencies:
|
||||||
|
pako: 0.2.9
|
||||||
|
tiny-inflate: 1.0.3
|
||||||
|
dev: false
|
||||||
|
|
||||||
/unpipe@1.0.0:
|
/unpipe@1.0.0:
|
||||||
resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==}
|
resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==}
|
||||||
engines: {node: '>= 0.8'}
|
engines: {node: '>= 0.8'}
|
||||||
@@ -8286,6 +8434,10 @@ packages:
|
|||||||
engines: {node: '>=12.20'}
|
engines: {node: '>=12.20'}
|
||||||
dev: true
|
dev: true
|
||||||
|
|
||||||
|
/yoga-wasm-web@0.3.3:
|
||||||
|
resolution: {integrity: sha512-N+d4UJSJbt/R3wqY7Coqs5pcV0aUj2j9IaQ3rNj9bVCLld8tTGKRa2USARjnvZJWVx1NDmQev8EknoczaOQDOA==}
|
||||||
|
dev: false
|
||||||
|
|
||||||
/zod@3.21.4:
|
/zod@3.21.4:
|
||||||
resolution: {integrity: sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw==}
|
resolution: {integrity: sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw==}
|
||||||
dev: false
|
dev: false
|
||||||
|
|||||||
84  prisma/datasets/validated_tweets.csv  Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
Text,sentiment,emotion
|
||||||
|
@dell your customer service is horrible especially agent syedfaisal who has made this experience of purchasing a new computer downright awful and I’ll reconsider ever buying a Dell in the future @DellTech,negative,anger
|
||||||
|
@zacokalo @Dell @DellCares @Dell give the man what he paid for!,neutral,anger
|
||||||
|
"COOKING STREAM DAY!!! Ty to @Alienware for sponsoring this stream! I’ll be making a bunch of Japanese Alien themed foods hehe
|
||||||
|
|
||||||
|
Come check it out! https://t.co/m06tJQ06zk
|
||||||
|
|
||||||
|
#alienwarepartner #intelgaming @Dell @IntelGaming https://t.co/qOdQX2E8VD",positive,joy
|
||||||
|
@emijuju_ @Alienware @Dell @intel Beautiful 😍❤️😻,positive,joy
|
||||||
|
"What's your biggest data management challenge? • Cloud complexity? • Lengthy tech refresh cycles? • Capital budget constraints? Solve your challenges with as-a-Storage. Get simplicity, agility & control with @Dell #APEX. https://t.co/mCblMtH931 https://t.co/eepKNZ4Ai3",neutral,optimism
|
||||||
|
"This week we were at the ""Top Gun"" themed @Dell Product Expo. Eddie Muñoz met Maverick look-alike, California Tom Cruise (Jerome LeBlanc)!
|
||||||
|
|
||||||
|
""I feel the need, the need for speed."" - Maverick
|
||||||
|
#topgun #topgunmaverick #dell #delltechnologies #lockncharge https://t.co/QHYH2EbMjq",positive,joy
|
||||||
|
"Itsss been more than a week...i m following up with dell for troubleshootings...my https://t.co/lWhg2YKhQa suffering so as my hard earned money...hightly disappointed...contd..
|
||||||
|
@DellCares @Dell",negative,sadness
|
||||||
|
"@ashu_k7 @Dell Pathetic!!!!! I Dont mind taking legal action, this is deficency of service for which the customer is nt getting help..",negative,anger
|
||||||
|
@ashu_k7 @Dell Making life unhappy is the new tag line of #Dell,negative,sadness
|
||||||
|
"@Dell If you are buying a Dell, make sure you are making your life hell.
|
||||||
|
Better buy other laptops. If you wanted to opt for Dell better opt for garbage on the streets.",negative,anger
|
||||||
|
"MY DESK'S FINAL FORM? Seriously, I'm finally happy with my monitor setup here... and I'll keep this setup whenever I move... FOREVER. What do you think?
|
||||||
|
https://t.co/WJZ2JXtOnX
|
||||||
|
@Alienware @Dell cheers. https://t.co/6Whhldfpv0",positive,joy
|
||||||
|
"@Dell Dell Alienware computer has had software problems with SupportAssist since purchase. Dell, despite paying for Premium Support, has never fixed issues. Latest solution was to erase everything and reload....SupportAssist still doesn't work.",negative,anger
|
||||||
|
"HUGE congratulations to Startup Battle 3.0 winner ➡️ @Ox_Fulfillment x @cyborgcharu for being featured in @BusinessInsider & @Dell showcasing the journey at Ox! 🚀🚀🚀
|
||||||
|
|
||||||
|
We love to see our portfolio companies continuing to BUILD SOMETHING FROM NOTHING! 🔥 https://t.co/awBkn5ippB",positive,joy
|
||||||
|
@Dell happy Friday!,positive,joy
|
||||||
|
"@intel Core i5 1135G7 - 4732 points
|
||||||
|
@intel Core i5 1235 - 6619 points
|
||||||
|
@Dell Latitude 5420 x 5430.
|
||||||
|
Cinebench R23. Good job Intel!",positive,joy
|
||||||
|
@Dell india we purchased 52 docking station and we have around 100 users using dell laptop as well as dell monitor now they are refusing to replace my faulty product and disconnecting my every call....,negative,anger
|
||||||
|
"It's another year ans another day But cant fill it in yet the child hood dreams.
|
||||||
|
It's my birthdy today. Can anyone of you guys bless me with a simplest gaming oc that can run
|
||||||
|
@DOTA2 ?
|
||||||
|
@Dell @HP @VastGG @Acer @Alienware @Lenovo @toshiba @IBM @Fujitsu_Global @NEC https://t.co/69G8tL9sN8",neutral,joy
|
||||||
|
"@idoccor @Dell That's always the decision—wait, or, look elsewhere. In this case, I think I unfortunately need to wait since there are only two monitors with these specs and I don't like the other one 😂",negative,sadness
|
||||||
|
"@MichaelDell @Dell @DellCares For how long this will continue. It is high time you either fix the problem for good or replace the complete laptop. Spent over 60+ hours with Customer Care teams, which is not helping. Cannot keep going on like this.",negative,anger
|
||||||
|
"@Dell @DellCares but no, not really",neutral,sadness
|
||||||
|
"Business innovation requires insight, agility and efficiency. How do you get there? RP PRO, LLC recommends starting by proactively managing IT infrastructure with #OpenManage Systems from @Dell. https://t.co/fBcK1lfFMu https://t.co/xWHLkkHCjn",neutral,optimism
|
||||||
|
@Dell Yessirrrrr #NationalCoffeeDay,positive,joy
|
||||||
|
"New blog post from @Dell shared on https://t.co/EgfPChB8AT
|
||||||
|
|
||||||
|
Re-routing Our Connected and Autonomous Future https://t.co/AW8EHQrbd6
|
||||||
|
|
||||||
|
#future #futuretech #techinnovation https://t.co/koX8stKPsr",neutral,joy
|
||||||
|
"In a free-market economy, the folks @IronMountain can set prices as they see fit. Their customers are also free to find better prices at competitors like @Dell
|
||||||
|
@H3CGlobal @HPE
|
||||||
|
https://t.co/reZ56DNTBI",neutral,optimism
|
||||||
|
"Delighted to chat with many of our partners here in person at @Intel Innovation! @Dell, @Lenovo, @Supermicro_SMCI, @QuantaQCT #IntelON https://t.co/BxIeGW8deN",positive,joy
|
||||||
|
"A special gracias to our Startup Chica San Antonio 2022 sponsors @eBay, @jcpenney, @Barbie, @HEB, @Dell, @Honda, @SouthsideSATX💜✨ https://t.co/lZ6WWkziHl",positive,joy
|
||||||
|
"When your team decides to start supporting developers, your #ops must change too. More from @cote and @Dell Developer Community Manager @barton808: https://t.co/W6f1oMiTgV",neutral,optimism
|
||||||
|
@EmDStowers @LASERGIANT1 @ohwormongod @Ludovician_Vega @Dell our boy snitchin,neutral,anger
|
||||||
|
A 1st place dmi:Design Value Award goes to @Dell for a packaging modernization initiative that helped them get closer to their corporate Moonshot Sustainability Goal of 100% recycled or renewable packaging by 2030. More at https://t.co/dnhZWWLCQC #designvalue #DVA22,positive,optimism
|
||||||
|
Reducing deployment and maintenance complexity is the goal behind @dell and @WindRiver's new collaboration. https://t.co/2PxQgPuHUU,positive,optimism
|
||||||
|
@jaserhunter @Dell Love the sales pitch lol,positive,joy
|
||||||
|
@Dell india we purchased 52 docking station and we have around 100 users using dell laptop as well as dell monitor now they are refusing to replace my faulty product and disconnecting my every call....,negative,anger
|
||||||
|
@ashu_k7 @Dell One more example.. their technical support is also worse. https://t.co/20atSgI4fg,negative,anger
|
||||||
|
*angry screeches about @Dell proprietary MBR windows 8.1 partitions not being able to save as an img in clonezilla *,negative,anger
|
||||||
|
@socialitebooks @BBYC_Gamers @Dell @Alienware @BestBuyCanada @intelcanada Congratulations!!!,positive,joy
|
||||||
|
"Thank you to the @dell team for coming out to volunteer today! We truly appreciate your hard work and look forward to seeing you again soon!
|
||||||
|
|
||||||
|
If you and your team are interested in helping out at the UMLAUF, visit our website for more information: https://t.co/lVfsZT2ogS https://t.co/eLz0FY0y4M",positive,joy
|
||||||
|
"@TheCaramelGamer @intel @bravadogaming @Intel_Africa @Dell @DellTech @DellTechMEA @Alienware @IntelUK we love to see it.
|
||||||
|
|
||||||
|
Also also actually actually whoever did that artwork? 🔥🔥🔥 am a fan.",positive,joy
|
||||||
|
"LOVING MY DELL 2 IN 1 LAPTOP
|
||||||
|
YAYY 🥳🥳
|
||||||
|
@Dell #DellInspiron #DellLaptop https://t.co/vib96jf3tC",positive,joy
|
||||||
|
@Azure @OracleItalia @AWS_Italy @lenovoitalia @Dell discussing the future of #HPC during the #hpcroundtable22 in Turin today #highperformancecomputing https://t.co/jJ1WqBulPF,neutral,joy
|
||||||
|
Attracting talent @AmericanChamber. @marg_cola @Dell speaks of quality of life connectivity and the Opportunity for development being so crucial. Housing availability is now impacting on decision making for potential candidates. #WhyCork,positive,optimism
|
||||||
|
.@Dell partners with @WindRiver on modular cloud-native telecommunications infrastructure https://t.co/4SWATspwCP @SiliconANGLE @Mike_Wheatley @holgermu @constellationr,neutral,joy
|
||||||
|
@Dell Not buy Dell Inspiron laptop,neutral,sadness
|
||||||
|
"@dell #delltechforum reminding us IDC have predicted that by 2024, 50% of everything we consume in technology will be as a service https://t.co/3UBiZJX0LE",neutral,optimism
|
||||||
|
@RachMurph @HETTShow @Dell Thank you for coming! Great evening,positive,joy
|
||||||
|
Congratulations to Jason M of Moncton NB on winning a @Dell @Alienware m15 R7 15.6″ gaming laptop from @BestBuyCanada and @intelcanada's gaming days #contest on the blog. Visit https://t.co/VryaY5Rvv9 to learn about tech and for chances to win new tech. https://t.co/T6n0dzF6oL,positive,joy
|
||||||
|
@MattVisiwig @Dell Sour taste for sure 😶 But don't let ego distract you from what you really want to buy 😁,neutral,optimism
|
||||||
|
"Massive thank you goes to sponsors @HendersonLoggie @lindsaysnews @Dell @unity, all of our fantastic judges and mentors and the team at @EGX and @ExCeLLondon.
|
||||||
|
|
||||||
|
Big congratulations also to all of our other @AbertayDare teams - an amazing year! #Dare2022 https://t.co/jYe4agO7lW",positive,joy
|
||||||
|
"@timetcetera @rahaug Nah, I just need @Dell to start paying me comissions 😂",neutral,joy
|
||||||
|
"""Whether you’re an engineer, a designer, or work in supply chain management or sales, there are always opportunities to think about sustainability and how you can do things more efficiently."" 👏 — Oliver Campbell, Director of Packaging Engineering, @Dell https://t.co/vUJLTWNFwP https://t.co/GJWAzGfAxJ",positive,optimism
|
||||||
|
"Hi, my name is @listerepvp and I support @Dell, always.",positive,joy
|
||||||
|
@@ -0,0 +1,52 @@
-- DropForeignKey
ALTER TABLE "ModelOutput" DROP CONSTRAINT "ModelOutput_scenarioVariantCellId_fkey";

-- DropForeignKey
ALTER TABLE "OutputEvaluation" DROP CONSTRAINT "OutputEvaluation_modelOutputId_fkey";

-- DropIndex
DROP INDEX "OutputEvaluation_modelOutputId_evaluationId_key";

-- AlterTable
ALTER TABLE "OutputEvaluation" RENAME COLUMN "modelOutputId" TO "modelResponseId";

-- AlterTable
ALTER TABLE "ScenarioVariantCell" DROP COLUMN "retryTime",
DROP COLUMN "statusCode",
ADD COLUMN "jobQueuedAt" TIMESTAMP(3),
ADD COLUMN "jobStartedAt" TIMESTAMP(3);

ALTER TABLE "ModelOutput" RENAME TO "ModelResponse";

ALTER TABLE "ModelResponse"
ADD COLUMN "requestedAt" TIMESTAMP(3),
ADD COLUMN "receivedAt" TIMESTAMP(3),
ADD COLUMN "statusCode" INTEGER,
ADD COLUMN "errorMessage" TEXT,
ADD COLUMN "retryTime" TIMESTAMP(3),
ADD COLUMN "outdated" BOOLEAN NOT NULL DEFAULT false;

-- 3. Remove the unnecessary column
ALTER TABLE "ModelResponse"
DROP COLUMN "timeToComplete";

-- AlterTable
ALTER TABLE "ModelResponse" RENAME CONSTRAINT "ModelOutput_pkey" TO "ModelResponse_pkey";
ALTER TABLE "ModelResponse" ALTER COLUMN "output" DROP NOT NULL;

-- DropIndex
DROP INDEX "ModelOutput_scenarioVariantCellId_key";

-- AddForeignKey
ALTER TABLE "ModelResponse" ADD CONSTRAINT "ModelResponse_scenarioVariantCellId_fkey" FOREIGN KEY ("scenarioVariantCellId") REFERENCES "ScenarioVariantCell"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- RenameIndex
ALTER INDEX "ModelOutput_inputHash_idx" RENAME TO "ModelResponse_inputHash_idx";

-- CreateIndex
CREATE UNIQUE INDEX "OutputEvaluation_modelResponseId_evaluationId_key" ON "OutputEvaluation"("modelResponseId", "evaluationId");

-- AddForeignKey
ALTER TABLE "OutputEvaluation" ADD CONSTRAINT "OutputEvaluation_modelResponseId_fkey" FOREIGN KEY ("modelResponseId") REFERENCES "ModelResponse"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -0,0 +1,16 @@
-- CreateTable
CREATE TABLE "WorldChampEntrant" (
    "id" UUID NOT NULL,
    "userId" UUID NOT NULL,
    "approved" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "WorldChampEntrant_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "WorldChampEntrant_userId_key" ON "WorldChampEntrant"("userId");

-- AddForeignKey
ALTER TABLE "WorldChampEntrant" ADD CONSTRAINT "WorldChampEntrant_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -0,0 +1,3 @@
-- AlterTable
ALTER TABLE "User" ADD COLUMN "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
ADD COLUMN "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP;
@@ -90,12 +90,11 @@ enum CellRetrievalStatus {
 model ScenarioVariantCell {
   id String @id @default(uuid()) @db.Uuid

-  statusCode      Int?
-  errorMessage    String?
-  retryTime       DateTime?
   retrievalStatus CellRetrievalStatus @default(COMPLETE)
-  modelOutput     ModelOutput?
+  jobQueuedAt     DateTime?
+  jobStartedAt    DateTime?
+  modelResponses  ModelResponse[]
+  errorMessage    String? // Contains errors that occurred independently of model responses

   promptVariantId String        @db.Uuid
   promptVariant   PromptVariant @relation(fields: [promptVariantId], references: [id], onDelete: Cascade)
@@ -110,15 +109,20 @@ model ScenarioVariantCell {
   @@unique([promptVariantId, testScenarioId])
 }

-model ModelOutput {
+model ModelResponse {
   id String @id @default(uuid()) @db.Uuid

   inputHash        String
-  output           Json
-  timeToComplete   Int       @default(0)
+  requestedAt      DateTime?
+  receivedAt       DateTime?
+  output           Json?
   cost             Float?
   promptTokens     Int?
   completionTokens Int?
+  statusCode       Int?
+  errorMessage     String?
+  retryTime        DateTime?
+  outdated         Boolean   @default(false)

   createdAt DateTime @default(now())
   updatedAt DateTime @updatedAt
@@ -127,7 +131,6 @@ model ModelOutput {
   scenarioVariantCell ScenarioVariantCell @relation(fields: [scenarioVariantCellId], references: [id], onDelete: Cascade)
   outputEvaluations   OutputEvaluation[]

-  @@unique([scenarioVariantCellId])
   @@index([inputHash])
 }

@@ -159,8 +162,8 @@ model OutputEvaluation {
   result  Float
   details String?

-  modelOutputId String      @db.Uuid
-  modelOutput   ModelOutput @relation(fields: [modelOutputId], references: [id], onDelete: Cascade)
+  modelResponseId String        @db.Uuid
+  modelResponse   ModelResponse @relation(fields: [modelResponseId], references: [id], onDelete: Cascade)

   evaluationId String     @db.Uuid
   evaluation   Evaluation @relation(fields: [evaluationId], references: [id], onDelete: Cascade)
@@ -168,7 +171,7 @@ model OutputEvaluation {
   createdAt DateTime @default(now())
   updatedAt DateTime @updatedAt

-  @@unique([modelOutputId, evaluationId])
+  @@unique([modelResponseId, evaluationId])
 }

 model Organization {
@@ -205,6 +208,20 @@ model OrganizationUser {
   @@unique([organizationId, userId])
 }

+model WorldChampEntrant {
+  id String @id @default(uuid()) @db.Uuid
+
+  userId String @db.Uuid
+  user   User   @relation(fields: [userId], references: [id], onDelete: Cascade)
+
+  approved Boolean @default(false)
+
+  createdAt DateTime @default(now())
+  updatedAt DateTime @updatedAt
+
+  @@unique([userId])
+}
+
 model Account {
   id     String @id @default(uuid()) @db.Uuid
   userId String @db.Uuid
@@ -242,6 +259,10 @@ model User {
   sessions          Session[]
   organizationUsers OrganizationUser[]
   organizations     Organization[]
+  worldChampEntrant WorldChampEntrant?
+
+  createdAt DateTime @default(now())
+  updatedAt DateTime @default(now()) @updatedAt
 }

 model VerificationToken {
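With the schema above, entering a user into the world champs from application code could look roughly like this, using the `prisma` client the seed scripts below import from `~/server/db` (a sketch assuming default Prisma client naming, not the repo's actual signup handler):

```ts
import { prisma } from "~/server/db";

// upsert keyed on the unique userId keeps repeat signups idempotent.
export const enterWorldChamps = async (userId: string) =>
  prisma.worldChampEntrant.upsert({
    where: { userId },
    update: {},
    create: { userId, approved: false },
  });
```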
127  prisma/seedAgiEval.ts  Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
import { prisma } from "~/server/db";
|
||||||
|
import { generateNewCell } from "~/server/utils/generateNewCell";
|
||||||
|
import dedent from "dedent";
|
||||||
|
import { execSync } from "child_process";
|
||||||
|
import fs from "fs";
|
||||||
|
|
||||||
|
const defaultId = "11111111-1111-1111-1111-111111111112";
|
||||||
|
|
||||||
|
await prisma.organization.deleteMany({
|
||||||
|
where: { id: defaultId },
|
||||||
|
});
|
||||||
|
|
||||||
|
// If there's an existing org, just seed into it
|
||||||
|
const org =
|
||||||
|
(await prisma.organization.findFirst({})) ??
|
||||||
|
(await prisma.organization.create({
|
||||||
|
data: { id: defaultId },
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Clone the repo from git@github.com:microsoft/AGIEval.git into a tmp dir if it doesn't exist
|
||||||
|
const tmpDir = "/tmp/agi-eval";
|
||||||
|
if (!fs.existsSync(tmpDir)) {
|
||||||
|
execSync(`git clone git@github.com:microsoft/AGIEval.git ${tmpDir}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const datasets = [
|
||||||
|
"sat-en",
|
||||||
|
"sat-math",
|
||||||
|
"lsat-rc",
|
||||||
|
"lsat-ar",
|
||||||
|
"aqua-rat",
|
||||||
|
"logiqa-en",
|
||||||
|
"lsat-lr",
|
||||||
|
"math",
|
||||||
|
];
|
||||||
|
|
||||||
|
type Scenario = {
|
||||||
|
passage: string | null;
|
||||||
|
question: string;
|
||||||
|
options: string[] | null;
|
||||||
|
label: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
for (const dataset of datasets) {
|
||||||
|
const experimentName = `AGI-Eval: ${dataset}`;
|
||||||
|
const oldExperiment = await prisma.experiment.findFirst({
|
||||||
|
where: {
|
||||||
|
label: experimentName,
|
||||||
|
organizationId: org.id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
if (oldExperiment) {
|
||||||
|
await prisma.experiment.deleteMany({
|
||||||
|
where: { id: oldExperiment.id },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const experiment = await prisma.experiment.create({
|
||||||
|
data: {
|
||||||
|
id: oldExperiment?.id ?? undefined,
|
||||||
|
label: experimentName,
|
||||||
|
organizationId: org.id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const scenarios: Scenario[] = fs
|
||||||
|
.readFileSync(`${tmpDir}/data/v1/${dataset}.jsonl`, "utf8")
|
||||||
|
.split("\n")
|
||||||
|
.filter((line) => line.length > 0)
|
||||||
|
.map((line) => JSON.parse(line) as Scenario);
|
||||||
|
console.log("scenarios", scenarios.length);
|
||||||
|
|
||||||
|
await prisma.testScenario.createMany({
|
||||||
|
data: scenarios.slice(0, 30).map((scenario, i) => ({
|
||||||
|
experimentId: experiment.id,
|
||||||
|
sortIndex: i,
|
||||||
|
variableValues: {
|
||||||
|
passage: scenario.passage,
|
||||||
|
question: scenario.question,
|
||||||
|
options: scenario.options?.join("\n"),
|
||||||
|
label: scenario.label,
|
||||||
|
},
|
||||||
|
})),
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.templateVariable.createMany({
|
||||||
|
data: ["passage", "question", "options", "label"].map((label) => ({
|
||||||
|
experimentId: experiment.id,
|
||||||
|
label,
|
||||||
|
})),
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.promptVariant.createMany({
|
||||||
|
data: [
|
||||||
|
{
|
||||||
|
experimentId: experiment.id,
|
||||||
|
label: "Prompt Variant 1",
|
||||||
|
sortIndex: 0,
|
||||||
|
model: "gpt-3.5-turbo-0613",
|
||||||
|
modelProvider: "openai/ChatCompletion",
|
||||||
|
constructFnVersion: 1,
|
||||||
|
constructFn: dedent`
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-3.5-turbo-0613",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: \`Passage: ${"$"}{scenario.passage}\n\nQuestion: ${"$"}{scenario.question}\n\nOptions: ${"$"}{scenario.options}\n\n Respond with just the letter of the best option in the format Answer: (A).\`
|
||||||
|
}
|
||||||
|
],
|
||||||
|
temperature: 0,
|
||||||
|
})`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.evaluation.createMany({
|
||||||
|
data: [
|
||||||
|
{
|
||||||
|
experimentId: experiment.id,
|
||||||
|
label: "Eval",
|
||||||
|
evalType: "CONTAINS",
|
||||||
|
value: "Answer: ({{label}})",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
}
|
||||||
113  prisma/seedTwitterSentiment.ts  Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
import { prisma } from "~/server/db";
|
||||||
|
import dedent from "dedent";
|
||||||
|
import fs from "fs";
|
||||||
|
import { parse } from "csv-parse/sync";
|
||||||
|
|
||||||
|
const defaultId = "11111111-1111-1111-1111-111111111112";
|
||||||
|
|
||||||
|
await prisma.organization.deleteMany({
|
||||||
|
where: { id: defaultId },
|
||||||
|
});
|
||||||
|
|
||||||
|
// If there's an existing org, just seed into it
|
||||||
|
const org =
|
||||||
|
(await prisma.organization.findFirst({})) ??
|
||||||
|
(await prisma.organization.create({
|
||||||
|
data: { id: defaultId },
|
||||||
|
}));
|
||||||
|
|
||||||
|
type Scenario = {
|
||||||
|
text: string;
|
||||||
|
sentiment: string;
|
||||||
|
emotion: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
const experimentName = `Twitter Sentiment Analysis`;
|
||||||
|
const oldExperiment = await prisma.experiment.findFirst({
|
||||||
|
where: {
|
||||||
|
label: experimentName,
|
||||||
|
organizationId: org.id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
if (oldExperiment) {
|
||||||
|
await prisma.experiment.deleteMany({
|
||||||
|
where: { id: oldExperiment.id },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const experiment = await prisma.experiment.create({
|
||||||
|
data: {
|
||||||
|
id: oldExperiment?.id ?? undefined,
|
||||||
|
label: experimentName,
|
||||||
|
organizationId: org.id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const content = fs.readFileSync("./prisma/datasets/validated_tweets.csv", "utf8");
|
||||||
|
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
const records: any[] = parse(content, { delimiter: ",", from_line: 2 });
|
||||||
|
|
||||||
|
console.log("records", records);
|
||||||
|
|
||||||
|
const scenarios: Scenario[] = records.map((row) => ({
|
||||||
|
text: row[0],
|
||||||
|
sentiment: row[1],
|
||||||
|
emotion: row[2],
|
||||||
|
}));
|
||||||
|
|
||||||
|
console.log("scenarios", scenarios.length);
|
||||||
|
|
||||||
|
await prisma.testScenario.createMany({
|
||||||
|
data: scenarios.slice(0, 30).map((scenario, i) => ({
|
||||||
|
experimentId: experiment.id,
|
||||||
|
sortIndex: i,
|
||||||
|
variableValues: {
|
||||||
|
text: scenario.text,
|
||||||
|
sentiment: scenario.sentiment,
|
||||||
|
emotion: scenario.emotion,
|
||||||
|
},
|
||||||
|
})),
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.templateVariable.createMany({
|
||||||
|
data: ["text", "sentiment", "emotion"].map((label) => ({
|
||||||
|
experimentId: experiment.id,
|
||||||
|
label,
|
||||||
|
})),
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.promptVariant.createMany({
|
||||||
|
data: [
|
||||||
|
{
|
||||||
|
experimentId: experiment.id,
|
||||||
|
label: "Prompt Variant 1",
|
||||||
|
sortIndex: 0,
|
||||||
|
model: "gpt-3.5-turbo-0613",
|
||||||
|
modelProvider: "openai/ChatCompletion",
|
||||||
|
constructFnVersion: 1,
|
||||||
|
constructFn: dedent`
|
||||||
|
definePrompt("openai/ChatCompletion", {
|
||||||
|
model: "gpt-3.5-turbo-0613",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: \`Text: ${"$"}{scenario.text}\n\nRespond with the sentiment (negative|neutral|positive) and emotion (optimism|joy|anger|sadness) of the tweet in this format: "answer: <sentiment>-<emotion>".\`
|
||||||
|
}
|
||||||
|
],
|
||||||
|
temperature: 0,
|
||||||
|
})`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.evaluation.createMany({
|
||||||
|
data: [
|
||||||
|
{
|
||||||
|
experimentId: experiment.id,
|
||||||
|
label: "Eval",
|
||||||
|
evalType: "CONTAINS",
|
||||||
|
value: "answer: {{sentiment}}-{{emotion}}",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
BIN  public/fonts/Inconsolata_SemiExpanded-Medium.ttf  Normal file
Binary file not shown.
BIN  public/og.png  Normal file
Binary file not shown. (Size after: 62 KiB)
@@ -36,17 +36,9 @@ export const DeleteButton = () => {

   return (
     <>
-      <Button
-        size="sm"
-        variant={{ base: "outline", lg: "ghost" }}
-        colorScheme="red"
-        fontWeight="normal"
-        onClick={onOpen}
-      >
+      <Button size="sm" variant="ghost" colorScheme="red" fontWeight="normal" onClick={onOpen}>
         <Icon as={BsTrash} boxSize={4} />
-        <Text display={{ base: "none", lg: "block" }} ml={2}>
-          Delete Experiment
-        </Text>
+        <Text ml={2}>Delete Experiment</Text>
       </Button>

       <AlertDialog isOpen={isOpen} leastDestructiveRef={cancelRef} onClose={onClose}>
@@ -37,7 +37,6 @@ export const FloatingLabelInput = ({
       borderColor={isFocused ? "blue.500" : "gray.400"}
       autoComplete="off"
       value={value}
-      maxHeight={32}
       overflowY="auto"
       overflowX="hidden"
       {...props}
19  src/components/OutputsTable/OutputCell/CellContent.tsx  Normal file
@@ -0,0 +1,19 @@
import { type StackProps, VStack } from "@chakra-ui/react";
import { CellOptions } from "./CellOptions";

export const CellContent = ({
  hardRefetch,
  hardRefetching,
  children,
  ...props
}: {
  hardRefetch: () => void;
  hardRefetching: boolean;
} & StackProps) => (
  <VStack w="full" alignItems="flex-start" {...props}>
    <CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} />
    <VStack w="full" alignItems="flex-start" maxH={500} overflowY="auto">
      {children}
    </VStack>
  </VStack>
);
@@ -1,4 +1,4 @@
-import { Button, HStack, Icon, Tooltip } from "@chakra-ui/react";
+import { Button, HStack, Icon, Spinner, Tooltip } from "@chakra-ui/react";
 import { BsArrowClockwise } from "react-icons/bs";
 import { useExperimentAccess } from "~/utils/hooks";

@@ -12,7 +12,7 @@ export const CellOptions = ({
   const { canModify } = useExperimentAccess();
   return (
     <HStack justifyContent="flex-end" w="full">
-      {!refetchingOutput && canModify && (
+      {canModify && (
         <Tooltip label="Refetch output" aria-label="refetch output">
           <Button
             size="xs"
@@ -28,7 +28,7 @@ export const CellOptions = ({
             onClick={refetchOutput}
             aria-label="refetch output"
           >
-            <Icon as={BsArrowClockwise} boxSize={4} />
+            <Icon as={refetchingOutput ? Spinner : BsArrowClockwise} boxSize={4} />
           </Button>
         </Tooltip>
       )}
@@ -1,16 +1,19 @@
 import { api } from "~/utils/api";
 import { type PromptVariant, type Scenario } from "../types";
-import { Spinner, Text, Center, VStack } from "@chakra-ui/react";
+import { Text, VStack } from "@chakra-ui/react";
 import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
 import SyntaxHighlighter from "react-syntax-highlighter";
 import { docco } from "react-syntax-highlighter/dist/cjs/styles/hljs";
 import stringify from "json-stringify-pretty-compact";
-import { type ReactElement, useState, useEffect } from "react";
+import { type ReactElement, useState, useEffect, Fragment } from "react";
 import useSocket from "~/utils/useSocket";
 import { OutputStats } from "./OutputStats";
-import { ErrorHandler } from "./ErrorHandler";
-import { CellOptions } from "./CellOptions";
+import { RetryCountdown } from "./RetryCountdown";
 import frontendModelProviders from "~/modelProviders/frontendModelProviders";
+import { ResponseLog } from "./ResponseLog";
+import { CellContent } from "./CellContent";

+const WAITING_MESSAGE_INTERVAL = 20000;
+
 export default function OutputCell({
   scenario,
@@ -60,51 +63,97 @@ export default function OutputCell({
|
|||||||
|
|
||||||
const awaitingOutput =
|
const awaitingOutput =
|
||||||
!cell ||
|
!cell ||
|
||||||
|
!cell.evalsComplete ||
|
||||||
cell.retrievalStatus === "PENDING" ||
|
cell.retrievalStatus === "PENDING" ||
|
||||||
cell.retrievalStatus === "IN_PROGRESS" ||
|
cell.retrievalStatus === "IN_PROGRESS" ||
|
||||||
hardRefetching;
|
hardRefetching;
|
||||||
useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : 0), [awaitingOutput]);
|
useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : 0), [awaitingOutput]);
|
||||||
|
|
||||||
const modelOutput = cell?.modelOutput;
|
|
||||||
|
|
||||||
// TODO: disconnect from socket if we're not streaming anymore
|
// TODO: disconnect from socket if we're not streaming anymore
|
||||||
const streamedMessage = useSocket<OutputSchema>(cell?.id);
|
const streamedMessage = useSocket<OutputSchema>(cell?.id);
|
||||||
|
|
||||||
if (!vars) return null;
|
if (!vars) return null;
|
||||||
|
|
||||||
if (disabledReason) return <Text color="gray.500">{disabledReason}</Text>;
|
|
||||||
|
|
||||||
if (awaitingOutput && !streamedMessage)
|
|
||||||
return (
|
|
||||||
<Center h="100%" w="100%">
|
|
||||||
<Spinner />
|
|
||||||
</Center>
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!cell && !fetchingOutput)
|
if (!cell && !fetchingOutput)
|
||||||
return (
|
return (
|
||||||
<VStack>
|
<CellContent hardRefetching={hardRefetching} hardRefetch={hardRefetch}>
|
||||||
<CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} />
|
|
||||||
<Text color="gray.500">Error retrieving output</Text>
|
<Text color="gray.500">Error retrieving output</Text>
|
||||||
</VStack>
|
</CellContent>
|
||||||
);
|
);
|
||||||
|
|
||||||
if (cell && cell.errorMessage) {
|
if (cell && cell.errorMessage) {
|
||||||
return (
|
return (
|
||||||
<VStack>
|
<CellContent hardRefetching={hardRefetching} hardRefetch={hardRefetch}>
|
||||||
<CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} />
|
<Text color="red.500">{cell.errorMessage}</Text>
|
||||||
<ErrorHandler cell={cell} refetchOutput={hardRefetch} />
|
</CellContent>
|
||||||
</VStack>
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
const normalizedOutput = modelOutput
|
if (disabledReason) return <Text color="gray.500">{disabledReason}</Text>;
|
||||||
? provider.normalizeOutput(modelOutput.output)
|
|
||||||
|
const mostRecentResponse = cell?.modelResponses[cell.modelResponses.length - 1];
|
||||||
|
const showLogs = !streamedMessage && !mostRecentResponse?.output;
|
||||||
|
|
||||||
|
if (showLogs)
|
||||||
|
return (
|
||||||
|
<CellContent
|
||||||
|
hardRefetching={hardRefetching}
|
||||||
|
hardRefetch={hardRefetch}
|
||||||
|
alignItems="flex-start"
|
||||||
|
fontFamily="inconsolata, monospace"
|
||||||
|
spacing={0}
|
||||||
|
>
|
||||||
|
{cell?.jobQueuedAt && <ResponseLog time={cell.jobQueuedAt} title="Job queued" />}
|
||||||
|
{cell?.jobStartedAt && <ResponseLog time={cell.jobStartedAt} title="Job started" />}
|
||||||
|
{cell?.modelResponses?.map((response) => {
|
||||||
|
let numWaitingMessages = 0;
|
||||||
|
const relativeWaitingTime = response.receivedAt
|
||||||
|
? response.receivedAt.getTime()
|
||||||
|
: Date.now();
|
||||||
|
if (response.requestedAt) {
|
||||||
|
numWaitingMessages = Math.floor(
|
||||||
|
(relativeWaitingTime - response.requestedAt.getTime()) / WAITING_MESSAGE_INTERVAL,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return (
|
||||||
|
<Fragment key={response.id}>
|
||||||
|
{response.requestedAt && (
|
||||||
|
<ResponseLog time={response.requestedAt} title="Request sent to API" />
|
||||||
|
)}
|
||||||
|
{response.requestedAt &&
|
||||||
|
Array.from({ length: numWaitingMessages }, (_, i) => (
|
||||||
|
<ResponseLog
|
||||||
|
key={`waiting-${i}`}
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||||
|
time={new Date(response.requestedAt!.getTime() + i * WAITING_MESSAGE_INTERVAL)}
|
||||||
|
title="Waiting for response"
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
{response.receivedAt && (
|
||||||
|
<ResponseLog
|
||||||
|
time={response.receivedAt}
|
||||||
|
title="Response received from API"
|
||||||
|
message={`statusCode: ${response.statusCode ?? ""}\n ${
|
||||||
|
response.errorMessage ?? ""
|
||||||
|
}`}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</Fragment>
|
||||||
|
);
|
||||||
|
}) ?? null}
|
||||||
|
{mostRecentResponse?.retryTime && (
|
||||||
|
<RetryCountdown retryTime={mostRecentResponse.retryTime} />
|
||||||
|
)}
|
||||||
|
</CellContent>
|
||||||
|
);
|
||||||
|
|
||||||
|
const normalizedOutput = mostRecentResponse?.output
|
||||||
|
? provider.normalizeOutput(mostRecentResponse?.output)
|
||||||
: streamedMessage
|
: streamedMessage
|
||||||
? provider.normalizeOutput(streamedMessage)
|
? provider.normalizeOutput(streamedMessage)
|
||||||
: null;
|
: null;
|
||||||
|
|
||||||
if (modelOutput && normalizedOutput?.type === "json") {
|
if (mostRecentResponse?.output && normalizedOutput?.type === "json") {
|
||||||
return (
|
return (
|
||||||
<VStack
|
<VStack
|
||||||
w="100%"
|
w="100%"
|
||||||
@@ -114,8 +163,13 @@ export default function OutputCell({
|
|||||||
overflowX="hidden"
|
overflowX="hidden"
|
||||||
justifyContent="space-between"
|
justifyContent="space-between"
|
||||||
>
|
>
|
||||||
<VStack w="full" flex={1} spacing={0}>
|
<CellContent
|
||||||
<CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} />
|
hardRefetching={hardRefetching}
|
||||||
|
hardRefetch={hardRefetch}
|
||||||
|
w="full"
|
||||||
|
flex={1}
|
||||||
|
spacing={0}
|
||||||
|
>
|
||||||
<SyntaxHighlighter
|
<SyntaxHighlighter
|
||||||
customStyle={{ overflowX: "unset", width: "100%", flex: 1 }}
|
customStyle={{ overflowX: "unset", width: "100%", flex: 1 }}
|
||||||
language="json"
|
language="json"
|
||||||
@@ -127,8 +181,8 @@ export default function OutputCell({
|
|||||||
>
|
>
|
||||||
{stringify(normalizedOutput.value, { maxLength: 40 })}
|
{stringify(normalizedOutput.value, { maxLength: 40 })}
|
||||||
</SyntaxHighlighter>
|
</SyntaxHighlighter>
|
||||||
</VStack>
|
</CellContent>
|
||||||
<OutputStats modelOutput={modelOutput} scenario={scenario} />
|
<OutputStats modelResponse={mostRecentResponse} scenario={scenario} />
|
||||||
</VStack>
|
</VStack>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -138,10 +192,13 @@ export default function OutputCell({
|
|||||||
return (
|
return (
|
||||||
<VStack w="100%" h="100%" justifyContent="space-between" whiteSpace="pre-wrap">
|
<VStack w="100%" h="100%" justifyContent="space-between" whiteSpace="pre-wrap">
|
||||||
<VStack w="full" alignItems="flex-start" spacing={0}>
|
<VStack w="full" alignItems="flex-start" spacing={0}>
|
||||||
<CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} />
|
<CellContent hardRefetching={hardRefetching} hardRefetch={hardRefetch}>
|
||||||
<Text>{contentToDisplay}</Text>
|
<Text>{contentToDisplay}</Text>
|
||||||
|
</CellContent>
|
||||||
</VStack>
|
</VStack>
|
||||||
{modelOutput && <OutputStats modelOutput={modelOutput} scenario={scenario} />}
|
{mostRecentResponse?.output && (
|
||||||
|
<OutputStats modelResponse={mostRecentResponse} scenario={scenario} />
|
||||||
|
)}
|
||||||
</VStack>
|
</VStack>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,28 +7,32 @@ import { CostTooltip } from "~/components/tooltip/CostTooltip";
|
|||||||
const SHOW_TIME = true;
|
const SHOW_TIME = true;
|
||||||
|
|
||||||
export const OutputStats = ({
|
export const OutputStats = ({
|
||||||
modelOutput,
|
modelResponse,
|
||||||
}: {
|
}: {
|
||||||
modelOutput: NonNullable<
|
modelResponse: NonNullable<
|
||||||
NonNullable<RouterOutputs["scenarioVariantCells"]["get"]>["modelOutput"]
|
NonNullable<RouterOutputs["scenarioVariantCells"]["get"]>["modelResponses"][0]
|
||||||
>;
|
>;
|
||||||
scenario: Scenario;
|
scenario: Scenario;
|
||||||
}) => {
|
}) => {
|
||||||
const timeToComplete = modelOutput.timeToComplete;
|
const timeToComplete =
|
||||||
|
modelResponse.receivedAt && modelResponse.requestedAt
|
||||||
|
? modelResponse.receivedAt.getTime() - modelResponse.requestedAt.getTime()
|
||||||
|
: 0;
|
||||||
|
|
||||||
const promptTokens = modelOutput.promptTokens;
|
const promptTokens = modelResponse.promptTokens;
|
||||||
const completionTokens = modelOutput.completionTokens;
|
const completionTokens = modelResponse.completionTokens;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<HStack w="full" align="center" color="gray.500" fontSize="2xs" mt={{ base: 0, md: 1 }}>
|
<HStack w="full" align="center" color="gray.500" fontSize="2xs" mt={{ base: 0, md: 1 }}>
|
||||||
<HStack flex={1}>
|
<HStack flex={1}>
|
||||||
{modelOutput.outputEvaluations.map((evaluation) => {
|
{modelResponse.outputEvaluations.map((evaluation) => {
|
||||||
const passed = evaluation.result > 0.5;
|
const passed = evaluation.result > 0.5;
|
||||||
return (
|
return (
|
||||||
<Tooltip
|
<Tooltip
|
||||||
isDisabled={!evaluation.details}
|
isDisabled={!evaluation.details}
|
||||||
label={evaluation.details}
|
label={evaluation.details}
|
||||||
key={evaluation.id}
|
key={evaluation.id}
|
||||||
|
shouldWrapChildren
|
||||||
>
|
>
|
||||||
<HStack spacing={0}>
|
<HStack spacing={0}>
|
||||||
<Text>{evaluation.evaluation.label}</Text>
|
<Text>{evaluation.evaluation.label}</Text>
|
||||||
@@ -42,15 +46,15 @@ export const OutputStats = ({
|
|||||||
);
|
);
|
||||||
})}
|
})}
|
||||||
</HStack>
|
</HStack>
|
||||||
{modelOutput.cost && (
|
{modelResponse.cost && (
|
||||||
<CostTooltip
|
<CostTooltip
|
||||||
promptTokens={promptTokens}
|
promptTokens={promptTokens}
|
||||||
completionTokens={completionTokens}
|
completionTokens={completionTokens}
|
||||||
cost={modelOutput.cost}
|
cost={modelResponse.cost}
|
||||||
>
|
>
|
||||||
<HStack spacing={0}>
|
<HStack spacing={0}>
|
||||||
<Icon as={BsCurrencyDollar} />
|
<Icon as={BsCurrencyDollar} />
|
||||||
<Text mr={1}>{modelOutput.cost.toFixed(3)}</Text>
|
<Text mr={1}>{modelResponse.cost.toFixed(3)}</Text>
|
||||||
</HStack>
|
</HStack>
|
||||||
</CostTooltip>
|
</CostTooltip>
|
||||||
)}
|
)}
|
||||||
|
|||||||
22
src/components/OutputsTable/OutputCell/ResponseLog.tsx
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import { HStack, VStack, Text } from "@chakra-ui/react";
|
||||||
|
import dayjs from "dayjs";
|
||||||
|
|
||||||
|
export const ResponseLog = ({
|
||||||
|
time,
|
||||||
|
title,
|
||||||
|
message,
|
||||||
|
}: {
|
||||||
|
time: Date;
|
||||||
|
title: string;
|
||||||
|
message?: string;
|
||||||
|
}) => {
|
||||||
|
return (
|
||||||
|
<VStack spacing={0} alignItems="flex-start">
|
||||||
|
<HStack>
|
||||||
|
<Text>{dayjs(time).format("HH:mm:ss")}</Text>
|
||||||
|
<Text>{title}</Text>
|
||||||
|
</HStack>
|
||||||
|
{message && <Text pl={4}>{message}</Text>}
|
||||||
|
</VStack>
|
||||||
|
);
|
||||||
|
};
|
||||||
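For reference, a minimal usage sketch of the new ResponseLog component as OutputCell renders it above. The literal time, title, and message values are illustrative only, not taken from the app:

import { ResponseLog } from "./ResponseLog";

// Illustrative values; in OutputCell these come from the cell's model responses.
const requestedAt = new Date();

export const ExampleResponseLog = () => (
  <ResponseLog time={requestedAt} title="Request sent to API" message="statusCode: 200" />
);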
@@ -1,21 +1,12 @@
|
|||||||
import { type ScenarioVariantCell } from "@prisma/client";
|
import { Text } from "@chakra-ui/react";
|
||||||
import { VStack, Text } from "@chakra-ui/react";
|
|
||||||
import { useEffect, useState } from "react";
|
import { useEffect, useState } from "react";
|
||||||
import pluralize from "pluralize";
|
import pluralize from "pluralize";
|
||||||
|
|
||||||
export const ErrorHandler = ({
|
export const RetryCountdown = ({ retryTime }: { retryTime: Date }) => {
|
||||||
cell,
|
|
||||||
refetchOutput,
|
|
||||||
}: {
|
|
||||||
cell: ScenarioVariantCell;
|
|
||||||
refetchOutput: () => void;
|
|
||||||
}) => {
|
|
||||||
const [msToWait, setMsToWait] = useState(0);
|
const [msToWait, setMsToWait] = useState(0);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (!cell.retryTime) return;
|
const initialWaitTime = retryTime.getTime() - Date.now();
|
||||||
|
|
||||||
const initialWaitTime = cell.retryTime.getTime() - Date.now();
|
|
||||||
const msModuloOneSecond = initialWaitTime % 1000;
|
const msModuloOneSecond = initialWaitTime % 1000;
|
||||||
let remainingTime = initialWaitTime - msModuloOneSecond;
|
let remainingTime = initialWaitTime - msModuloOneSecond;
|
||||||
setMsToWait(remainingTime);
|
setMsToWait(remainingTime);
|
||||||
@@ -36,18 +27,13 @@ export const ErrorHandler = ({
|
|||||||
clearInterval(interval);
|
clearInterval(interval);
|
||||||
clearTimeout(timeout);
|
clearTimeout(timeout);
|
||||||
};
|
};
|
||||||
}, [cell.retryTime, cell.statusCode, setMsToWait, refetchOutput]);
|
}, [retryTime]);
|
||||||
|
|
||||||
|
if (msToWait <= 0) return null;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<VStack w="full">
|
|
||||||
<Text color="red.600" wordBreak="break-word">
|
|
||||||
{cell.errorMessage}
|
|
||||||
</Text>
|
|
||||||
{msToWait > 0 && (
|
|
||||||
<Text color="red.600" fontSize="sm">
|
<Text color="red.600" fontSize="sm">
|
||||||
Retrying in {pluralize("second", Math.ceil(msToWait / 1000), true)}...
|
Retrying in {pluralize("second", Math.ceil(msToWait / 1000), true)}...
|
||||||
</Text>
|
</Text>
|
||||||
)}
|
|
||||||
</VStack>
|
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
@@ -1,15 +1,27 @@
|
|||||||
import { type DragEvent } from "react";
|
import { useEffect, type DragEvent } from "react";
|
||||||
import { api } from "~/utils/api";
|
import { api } from "~/utils/api";
|
||||||
import { isEqual } from "lodash-es";
|
import { isEqual } from "lodash-es";
|
||||||
import { type Scenario } from "./types";
|
import { type Scenario } from "./types";
|
||||||
import { useExperiment, useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
|
import { useExperiment, useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
|
||||||
import { useState } from "react";
|
import { useState } from "react";
|
||||||
|
|
||||||
import { Box, Button, Flex, HStack, Icon, Spinner, Stack, Tooltip, VStack } from "@chakra-ui/react";
|
import {
|
||||||
|
Box,
|
||||||
|
Button,
|
||||||
|
HStack,
|
||||||
|
Icon,
|
||||||
|
IconButton,
|
||||||
|
Spinner,
|
||||||
|
Stack,
|
||||||
|
Tooltip,
|
||||||
|
VStack,
|
||||||
|
Text,
|
||||||
|
} from "@chakra-ui/react";
|
||||||
import { cellPadding } from "../constants";
|
import { cellPadding } from "../constants";
|
||||||
import { BsX } from "react-icons/bs";
|
import { BsArrowsAngleExpand, BsX } from "react-icons/bs";
|
||||||
import { RiDraggable } from "react-icons/ri";
|
import { RiDraggable } from "react-icons/ri";
|
||||||
import { FloatingLabelInput } from "./FloatingLabelInput";
|
import { FloatingLabelInput } from "./FloatingLabelInput";
|
||||||
|
import { ScenarioEditorModal } from "./ScenarioEditorModal";
|
||||||
|
|
||||||
export default function ScenarioEditor({
|
export default function ScenarioEditor({
|
||||||
scenario,
|
scenario,
|
||||||
@@ -28,6 +40,10 @@ export default function ScenarioEditor({
|
|||||||
|
|
||||||
const [values, setValues] = useState<Record<string, string>>(savedValues);
|
const [values, setValues] = useState<Record<string, string>>(savedValues);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (savedValues) setValues(savedValues);
|
||||||
|
}, [savedValues]);
|
||||||
|
|
||||||
const experiment = useExperiment();
|
const experiment = useExperiment();
|
||||||
const vars = api.templateVars.list.useQuery({ experimentId: experiment.data?.id ?? "" });
|
const vars = api.templateVars.list.useQuery({ experimentId: experiment.data?.id ?? "" });
|
||||||
|
|
||||||
@@ -71,7 +87,10 @@ export default function ScenarioEditor({
|
|||||||
[reorderMutation, scenario.id],
|
[reorderMutation, scenario.id],
|
||||||
);
|
);
|
||||||
|
|
||||||
|
const [scenarioEditorModalOpen, setScenarioEditorModalOpen] = useState(false);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
|
<>
|
||||||
<HStack
|
<HStack
|
||||||
alignItems="flex-start"
|
alignItems="flex-start"
|
||||||
px={cellPadding.x}
|
px={cellPadding.x}
|
||||||
@@ -130,24 +149,36 @@ export default function ScenarioEditor({
|
|||||||
)}
|
)}
|
||||||
|
|
||||||
{variableLabels.length === 0 ? (
|
{variableLabels.length === 0 ? (
|
||||||
<Box color="gray.500">{vars.data ? "No scenario variables configured" : "Loading..."}</Box>
|
<Box color="gray.500">
|
||||||
|
{vars.data ? "No scenario variables configured" : "Loading..."}
|
||||||
|
</Box>
|
||||||
) : (
|
) : (
|
||||||
<VStack spacing={4} flex={1} py={2}>
|
<VStack spacing={4} flex={1} py={2}>
|
||||||
|
<HStack justifyContent="space-between" w="100%">
|
||||||
|
<Text color="gray.500">Scenario</Text>
|
||||||
|
<IconButton
|
||||||
|
className="fullscreen-toggle"
|
||||||
|
aria-label="Maximize"
|
||||||
|
icon={<BsArrowsAngleExpand />}
|
||||||
|
onClick={() => setScenarioEditorModalOpen(true)}
|
||||||
|
boxSize={6}
|
||||||
|
borderRadius={4}
|
||||||
|
p={1.5}
|
||||||
|
minW={0}
|
||||||
|
colorScheme="gray"
|
||||||
|
color="gray.500"
|
||||||
|
variant="ghost"
|
||||||
|
/>
|
||||||
|
</HStack>
|
||||||
{variableLabels.map((key) => {
|
{variableLabels.map((key) => {
|
||||||
const value = values[key] ?? "";
|
const value = values[key] ?? "";
|
||||||
const layoutDirection = value.length > 20 ? "column" : "row";
|
|
||||||
return (
|
return (
|
||||||
<Flex
|
|
||||||
key={key}
|
|
||||||
direction={layoutDirection}
|
|
||||||
alignItems={layoutDirection === "column" ? "flex-start" : "center"}
|
|
||||||
flexWrap="wrap"
|
|
||||||
width="full"
|
|
||||||
>
|
|
||||||
<FloatingLabelInput
|
<FloatingLabelInput
|
||||||
|
key={key}
|
||||||
label={key}
|
label={key}
|
||||||
isDisabled={!canModify}
|
isDisabled={!canModify}
|
||||||
style={{ width: "100%" }}
|
style={{ width: "100%" }}
|
||||||
|
maxHeight={32}
|
||||||
value={value}
|
value={value}
|
||||||
onChange={(e) => {
|
onChange={(e) => {
|
||||||
setValues((prev) => ({ ...prev, [key]: e.target.value }));
|
setValues((prev) => ({ ...prev, [key]: e.target.value }));
|
||||||
@@ -162,7 +193,6 @@ export default function ScenarioEditor({
|
|||||||
onMouseEnter={() => setVariableInputHovered(true)}
|
onMouseEnter={() => setVariableInputHovered(true)}
|
||||||
onMouseLeave={() => setVariableInputHovered(false)}
|
onMouseLeave={() => setVariableInputHovered(false)}
|
||||||
/>
|
/>
|
||||||
</Flex>
|
|
||||||
);
|
);
|
||||||
})}
|
})}
|
||||||
{hasChanged && (
|
{hasChanged && (
|
||||||
@@ -184,5 +214,13 @@ export default function ScenarioEditor({
|
|||||||
</VStack>
|
</VStack>
|
||||||
)}
|
)}
|
||||||
</HStack>
|
</HStack>
|
||||||
|
{scenarioEditorModalOpen && (
|
||||||
|
<ScenarioEditorModal
|
||||||
|
scenarioId={scenario.id}
|
||||||
|
initialValues={savedValues}
|
||||||
|
onClose={() => setScenarioEditorModalOpen(false)}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
132
src/components/OutputsTable/ScenarioEditorModal.tsx
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
import {
|
||||||
|
Button,
|
||||||
|
HStack,
|
||||||
|
Icon,
|
||||||
|
Modal,
|
||||||
|
ModalBody,
|
||||||
|
ModalCloseButton,
|
||||||
|
ModalContent,
|
||||||
|
ModalFooter,
|
||||||
|
ModalHeader,
|
||||||
|
ModalOverlay,
|
||||||
|
Spinner,
|
||||||
|
Text,
|
||||||
|
VStack,
|
||||||
|
} from "@chakra-ui/react";
|
||||||
|
import { useEffect, useState } from "react";
|
||||||
|
import { BsFileTextFill } from "react-icons/bs";
|
||||||
|
import { isEqual } from "lodash-es";
|
||||||
|
|
||||||
|
import { api } from "~/utils/api";
|
||||||
|
import {
|
||||||
|
useScenario,
|
||||||
|
useHandledAsyncCallback,
|
||||||
|
useExperiment,
|
||||||
|
useExperimentAccess,
|
||||||
|
} from "~/utils/hooks";
|
||||||
|
import { FloatingLabelInput } from "./FloatingLabelInput";
|
||||||
|
|
||||||
|
export const ScenarioEditorModal = ({
|
||||||
|
scenarioId,
|
||||||
|
initialValues,
|
||||||
|
onClose,
|
||||||
|
}: {
|
||||||
|
scenarioId: string;
|
||||||
|
initialValues: Record<string, string>;
|
||||||
|
onClose: () => void;
|
||||||
|
}) => {
|
||||||
|
const utils = api.useContext();
|
||||||
|
const experiment = useExperiment();
|
||||||
|
const { canModify } = useExperimentAccess();
|
||||||
|
const scenario = useScenario(scenarioId);
|
||||||
|
|
||||||
|
const savedValues = scenario.data?.variableValues as Record<string, string>;
|
||||||
|
|
||||||
|
const [values, setValues] = useState<Record<string, string>>(initialValues);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (savedValues) setValues(savedValues);
|
||||||
|
}, [savedValues]);
|
||||||
|
|
||||||
|
const hasChanged = !isEqual(savedValues, values);
|
||||||
|
|
||||||
|
const mutation = api.scenarios.replaceWithValues.useMutation();
|
||||||
|
|
||||||
|
const [onSave, saving] = useHandledAsyncCallback(async () => {
|
||||||
|
await mutation.mutateAsync({
|
||||||
|
id: scenarioId,
|
||||||
|
values,
|
||||||
|
});
|
||||||
|
await utils.scenarios.list.invalidate();
|
||||||
|
}, [mutation, values]);
|
||||||
|
|
||||||
|
console.log("scenario", scenario);
|
||||||
|
|
||||||
|
const vars = api.templateVars.list.useQuery({ experimentId: experiment.data?.id ?? "" });
|
||||||
|
const variableLabels = vars.data?.map((v) => v.label) ?? [];
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Modal
|
||||||
|
isOpen
|
||||||
|
onClose={onClose}
|
||||||
|
size={{ base: "xl", sm: "2xl", md: "3xl", lg: "5xl", xl: "7xl" }}
|
||||||
|
>
|
||||||
|
<ModalOverlay />
|
||||||
|
<ModalContent w={1200}>
|
||||||
|
<ModalHeader>
|
||||||
|
<HStack>
|
||||||
|
<Icon as={BsFileTextFill} />
|
||||||
|
<Text>Scenario</Text>
|
||||||
|
</HStack>
|
||||||
|
</ModalHeader>
|
||||||
|
<ModalCloseButton />
|
||||||
|
<ModalBody maxW="unset">
|
||||||
|
<VStack spacing={8}>
|
||||||
|
{values &&
|
||||||
|
variableLabels.map((key) => {
|
||||||
|
const value = values[key] ?? "";
|
||||||
|
return (
|
||||||
|
<FloatingLabelInput
|
||||||
|
key={key}
|
||||||
|
label={key}
|
||||||
|
isDisabled={!canModify}
|
||||||
|
_disabled={{ opacity: 1 }}
|
||||||
|
style={{ width: "100%" }}
|
||||||
|
value={value}
|
||||||
|
onChange={(e) => {
|
||||||
|
setValues((prev) => ({ ...prev, [key]: e.target.value }));
|
||||||
|
}}
|
||||||
|
onKeyDown={(e) => {
|
||||||
|
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
|
||||||
|
e.preventDefault();
|
||||||
|
e.currentTarget.blur();
|
||||||
|
onSave();
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</VStack>
|
||||||
|
</ModalBody>
|
||||||
|
|
||||||
|
<ModalFooter>
|
||||||
|
{canModify && (
|
||||||
|
<HStack>
|
||||||
|
<Button
|
||||||
|
colorScheme="gray"
|
||||||
|
onClick={() => setValues(savedValues)}
|
||||||
|
minW={24}
|
||||||
|
isDisabled={!hasChanged}
|
||||||
|
>
|
||||||
|
<Text>Reset</Text>
|
||||||
|
</Button>
|
||||||
|
<Button colorScheme="blue" onClick={onSave} minW={24} isDisabled={!hasChanged}>
|
||||||
|
{saving ? <Spinner boxSize={4} /> : <Text>Save</Text>}
|
||||||
|
</Button>
|
||||||
|
</HStack>
|
||||||
|
)}
|
||||||
|
</ModalFooter>
|
||||||
|
</ModalContent>
|
||||||
|
</Modal>
|
||||||
|
);
|
||||||
|
};
|
||||||
@@ -21,17 +21,14 @@ export default function VariantStats(props: { variant: PromptVariant }) {
|
|||||||
completionTokens: 0,
|
completionTokens: 0,
|
||||||
scenarioCount: 0,
|
scenarioCount: 0,
|
||||||
outputCount: 0,
|
outputCount: 0,
|
||||||
awaitingRetrievals: false,
|
awaitingEvals: false,
|
||||||
},
|
},
|
||||||
refetchInterval,
|
refetchInterval,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
// Poll every two seconds while we are waiting for LLM retrievals to finish
|
// Poll every five seconds while we are waiting for evals to finish
|
||||||
useEffect(
|
useEffect(() => setRefetchInterval(data.awaitingEvals ? 5000 : 0), [data.awaitingEvals]);
|
||||||
() => setRefetchInterval(data.awaitingRetrievals ? 2000 : 0),
|
|
||||||
[data.awaitingRetrievals],
|
|
||||||
);
|
|
||||||
|
|
||||||
const [passColor, neutralColor, failColor] = useToken("colors", [
|
const [passColor, neutralColor, failColor] = useToken("colors", [
|
||||||
"green.500",
|
"green.500",
|
||||||
@@ -51,12 +48,12 @@ export default function VariantStats(props: { variant: PromptVariant }) {
|
|||||||
fontSize="xs"
|
fontSize="xs"
|
||||||
py={cellPadding.y}
|
py={cellPadding.y}
|
||||||
>
|
>
|
||||||
|
<HStack px={cellPadding.x}>
|
||||||
{showNumFinished && (
|
{showNumFinished && (
|
||||||
<Text>
|
<Text>
|
||||||
{data.outputCount} / {data.scenarioCount}
|
{data.outputCount} / {data.scenarioCount}
|
||||||
</Text>
|
</Text>
|
||||||
)}
|
)}
|
||||||
<HStack px={cellPadding.x}>
|
|
||||||
{data.evalResults.map((result) => {
|
{data.evalResults.map((result) => {
|
||||||
const passedFrac = result.passCount / result.totalCount;
|
const passedFrac = result.passCount / result.totalCount;
|
||||||
return (
|
return (
|
||||||
@@ -69,7 +66,7 @@ export default function VariantStats(props: { variant: PromptVariant }) {
|
|||||||
);
|
);
|
||||||
})}
|
})}
|
||||||
</HStack>
|
</HStack>
|
||||||
{data.overallCost && !data.awaitingRetrievals && (
|
{data.overallCost && (
|
||||||
<CostTooltip
|
<CostTooltip
|
||||||
promptTokens={data.promptTokens}
|
promptTokens={data.promptTokens}
|
||||||
completionTokens={data.completionTokens}
|
completionTokens={data.completionTokens}
|
||||||
|
|||||||
@@ -33,7 +33,7 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
|
|||||||
<Grid
|
<Grid
|
||||||
pt={4}
|
pt={4}
|
||||||
pb={24}
|
pb={24}
|
||||||
pl={4}
|
pl={8}
|
||||||
display="grid"
|
display="grid"
|
||||||
gridTemplateColumns={`250px repeat(${variants.data.length}, minmax(300px, 1fr)) auto`}
|
gridTemplateColumns={`250px repeat(${variants.data.length}, minmax(300px, 1fr)) auto`}
|
||||||
sx={{
|
sx={{
|
||||||
@@ -53,6 +53,7 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
|
|||||||
colStart: i + 2,
|
colStart: i + 2,
|
||||||
borderLeftWidth: i === 0 ? 1 : 0,
|
borderLeftWidth: i === 0 ? 1 : 0,
|
||||||
marginLeft: i === 0 ? "-1px" : 0,
|
marginLeft: i === 0 ? "-1px" : 0,
|
||||||
|
backgroundColor: "gray.100",
|
||||||
};
|
};
|
||||||
return (
|
return (
|
||||||
<Fragment key={variant.uiId}>
|
<Fragment key={variant.uiId}>
|
||||||
|
|||||||
@@ -1,11 +1,4 @@
|
|||||||
import { type GridItemProps, type SystemStyleObject } from "@chakra-ui/react";
|
import { type GridItemProps } from "@chakra-ui/react";
|
||||||
|
|
||||||
export const stickyHeaderStyle: SystemStyleObject = {
|
|
||||||
position: "sticky",
|
|
||||||
top: "0",
|
|
||||||
backgroundColor: "#fff",
|
|
||||||
zIndex: 10,
|
|
||||||
};
|
|
||||||
|
|
||||||
export const borders: GridItemProps = {
|
export const borders: GridItemProps = {
|
||||||
borderRightWidth: 1,
|
borderRightWidth: 1,
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ import { useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
|
|||||||
import { HStack, Icon, Text, GridItem, type GridItemProps } from "@chakra-ui/react"; // Changed here
|
import { HStack, Icon, Text, GridItem, type GridItemProps } from "@chakra-ui/react"; // Changed here
|
||||||
import { cellPadding, headerMinHeight } from "../constants";
|
import { cellPadding, headerMinHeight } from "../constants";
|
||||||
import AutoResizeTextArea from "../AutoResizeTextArea";
|
import AutoResizeTextArea from "../AutoResizeTextArea";
|
||||||
import { stickyHeaderStyle } from "../OutputsTable/styles";
|
|
||||||
import VariantHeaderMenuButton from "./VariantHeaderMenuButton";
|
import VariantHeaderMenuButton from "./VariantHeaderMenuButton";
|
||||||
|
|
||||||
export default function VariantHeader(
|
export default function VariantHeader(
|
||||||
@@ -53,7 +52,17 @@ export default function VariantHeader(
|
|||||||
|
|
||||||
if (!canModify) {
|
if (!canModify) {
|
||||||
return (
|
return (
|
||||||
<GridItem padding={0} sx={stickyHeaderStyle} borderTopWidth={1} {...gridItemProps}>
|
<GridItem
|
||||||
|
padding={0}
|
||||||
|
sx={{
|
||||||
|
position: "sticky",
|
||||||
|
top: "0",
|
||||||
|
// Ensure that the menu always appears above the sticky header of other variants
|
||||||
|
zIndex: menuOpen ? "dropdown" : 10,
|
||||||
|
}}
|
||||||
|
borderTopWidth={1}
|
||||||
|
{...gridItemProps}
|
||||||
|
>
|
||||||
<Text fontSize={16} fontWeight="bold" px={cellPadding.x} py={cellPadding.y}>
|
<Text fontSize={16} fontWeight="bold" px={cellPadding.x} py={cellPadding.y}>
|
||||||
{variant.label}
|
{variant.label}
|
||||||
</Text>
|
</Text>
|
||||||
@@ -65,15 +74,16 @@ export default function VariantHeader(
|
|||||||
<GridItem
|
<GridItem
|
||||||
padding={0}
|
padding={0}
|
||||||
sx={{
|
sx={{
|
||||||
...stickyHeaderStyle,
|
position: "sticky",
|
||||||
|
top: "0",
|
||||||
// Ensure that the menu always appears above the sticky header of other variants
|
// Ensure that the menu always appears above the sticky header of other variants
|
||||||
zIndex: menuOpen ? "dropdown" : stickyHeaderStyle.zIndex,
|
zIndex: menuOpen ? "dropdown" : 10,
|
||||||
}}
|
}}
|
||||||
borderTopWidth={1}
|
borderTopWidth={1}
|
||||||
{...gridItemProps}
|
{...gridItemProps}
|
||||||
>
|
>
|
||||||
<HStack
|
<HStack
|
||||||
spacing={4}
|
spacing={2}
|
||||||
alignItems="flex-start"
|
alignItems="flex-start"
|
||||||
minH={headerMinHeight}
|
minH={headerMinHeight}
|
||||||
draggable={!isInputHovered}
|
draggable={!isInputHovered}
|
||||||
@@ -92,7 +102,8 @@ export default function VariantHeader(
|
|||||||
setIsDragTarget(false);
|
setIsDragTarget(false);
|
||||||
}}
|
}}
|
||||||
onDrop={onReorder}
|
onDrop={onReorder}
|
||||||
backgroundColor={isDragTarget ? "gray.100" : "transparent"}
|
backgroundColor={isDragTarget ? "gray.200" : "gray.100"}
|
||||||
|
h="full"
|
||||||
>
|
>
|
||||||
<Icon
|
<Icon
|
||||||
as={RiDraggable}
|
as={RiDraggable}
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ export const HeaderButtons = () => {
|
|||||||
colorScheme={canModify ? undefined : "orange"}
|
colorScheme={canModify ? undefined : "orange"}
|
||||||
bgColor={canModify ? undefined : "orange.400"}
|
bgColor={canModify ? undefined : "orange.400"}
|
||||||
minW={0}
|
minW={0}
|
||||||
variant={canModify ? "ghost" : "solid"}
|
variant={{ base: "solid", md: canModify ? "ghost" : "solid" }}
|
||||||
>
|
>
|
||||||
{isForking ? <Spinner boxSize={5} /> : <Icon as={TbGitFork} boxSize={5} />}
|
{isForking ? <Spinner boxSize={5} /> : <Icon as={TbGitFork} boxSize={5} />}
|
||||||
<Text ml={2}>Fork</Text>
|
<Text ml={2}>Fork</Text>
|
||||||
|
|||||||
@@ -84,7 +84,11 @@ const NavSidebar = () => {
|
|||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
</VStack>
|
</VStack>
|
||||||
{user ? <UserMenu user={user} /> : <Divider />}
|
{user ? (
|
||||||
|
<UserMenu user={user} borderColor={"gray.200"} borderTopWidth={1} borderBottomWidth={1} />
|
||||||
|
) : (
|
||||||
|
<Divider />
|
||||||
|
)}
|
||||||
<VStack spacing={0} align="center">
|
<VStack spacing={0} align="center">
|
||||||
<Link
|
<Link
|
||||||
href="https://github.com/openpipe/openpipe"
|
href="https://github.com/openpipe/openpipe"
|
||||||
|
|||||||
@@ -8,12 +8,16 @@ import {
|
|||||||
PopoverTrigger,
|
PopoverTrigger,
|
||||||
PopoverContent,
|
PopoverContent,
|
||||||
Link,
|
Link,
|
||||||
|
useColorMode,
|
||||||
|
type StackProps,
|
||||||
} from "@chakra-ui/react";
|
} from "@chakra-ui/react";
|
||||||
import { type Session } from "next-auth";
|
import { type Session } from "next-auth";
|
||||||
import { signOut } from "next-auth/react";
|
import { signOut } from "next-auth/react";
|
||||||
import { BsBoxArrowRight, BsChevronRight, BsPersonCircle } from "react-icons/bs";
|
import { BsBoxArrowRight, BsChevronRight, BsPersonCircle } from "react-icons/bs";
|
||||||
|
|
||||||
export default function UserMenu({ user }: { user: Session }) {
|
export default function UserMenu({ user, ...rest }: { user: Session } & StackProps) {
|
||||||
|
const { colorMode } = useColorMode();
|
||||||
|
|
||||||
const profileImage = user.user.image ? (
|
const profileImage = user.user.image ? (
|
||||||
<Image src={user.user.image} alt="profile picture" boxSize={8} borderRadius="50%" />
|
<Image src={user.user.image} alt="profile picture" boxSize={8} borderRadius="50%" />
|
||||||
) : (
|
) : (
|
||||||
@@ -29,12 +33,10 @@ export default function UserMenu({ user }: { user: Session }) {
|
|||||||
px={3}
|
px={3}
|
||||||
spacing={3}
|
spacing={3}
|
||||||
py={2}
|
py={2}
|
||||||
borderColor={"gray.200"}
|
{...rest}
|
||||||
borderTopWidth={1}
|
|
||||||
borderBottomWidth={1}
|
|
||||||
cursor="pointer"
|
cursor="pointer"
|
||||||
_hover={{
|
_hover={{
|
||||||
bgColor: "gray.200",
|
bgColor: colorMode === "light" ? "gray.200" : "gray.700",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
{profileImage}
|
{profileImage}
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ export const env = createEnv({
|
|||||||
GITHUB_CLIENT_SECRET: z.string().min(1),
|
GITHUB_CLIENT_SECRET: z.string().min(1),
|
||||||
OPENAI_API_KEY: z.string().min(1),
|
OPENAI_API_KEY: z.string().min(1),
|
||||||
REPLICATE_API_TOKEN: z.string().default("placeholder"),
|
REPLICATE_API_TOKEN: z.string().default("placeholder"),
|
||||||
|
ANTHROPIC_API_KEY: z.string().default("placeholder"),
|
||||||
},
|
},
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -44,6 +45,7 @@ export const env = createEnv({
|
|||||||
GITHUB_CLIENT_ID: process.env.GITHUB_CLIENT_ID,
|
GITHUB_CLIENT_ID: process.env.GITHUB_CLIENT_ID,
|
||||||
GITHUB_CLIENT_SECRET: process.env.GITHUB_CLIENT_SECRET,
|
GITHUB_CLIENT_SECRET: process.env.GITHUB_CLIENT_SECRET,
|
||||||
REPLICATE_API_TOKEN: process.env.REPLICATE_API_TOKEN,
|
REPLICATE_API_TOKEN: process.env.REPLICATE_API_TOKEN,
|
||||||
|
ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY,
|
||||||
},
|
},
|
||||||
/**
|
/**
|
||||||
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation.
|
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation.
|
||||||
|
|||||||
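A small sketch of the behavior the .default("placeholder") entry above gives for ANTHROPIC_API_KEY; this is an assumption-level illustration of zod's default handling, not code from the repo:

import { z } from "zod";

// Mirrors the server schema entry above: when ANTHROPIC_API_KEY is unset,
// zod substitutes "placeholder" so local builds still boot without a key.
const anthropicKey = z.string().default("placeholder").parse(process.env.ANTHROPIC_API_KEY);
console.log(anthropicKey); // "placeholder" when the variable is missing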
69
src/modelProviders/anthropic/codegen/codegen.ts
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||||
|
|
||||||
|
import YAML from "yaml";
|
||||||
|
import fs from "fs";
|
||||||
|
import path from "path";
|
||||||
|
import { openapiSchemaToJsonSchema } from "@openapi-contrib/openapi-schema-to-json-schema";
|
||||||
|
import $RefParser from "@apidevtools/json-schema-ref-parser";
|
||||||
|
import { type JSONObject } from "superjson/dist/types";
|
||||||
|
import assert from "assert";
|
||||||
|
import { type JSONSchema4Object } from "json-schema";
|
||||||
|
import { isObject } from "lodash-es";
|
||||||
|
|
||||||
|
// @ts-expect-error for some reason missing from types
|
||||||
|
import parserEstree from "prettier/plugins/estree";
|
||||||
|
import parserBabel from "prettier/plugins/babel";
|
||||||
|
import prettier from "prettier/standalone";
|
||||||
|
|
||||||
|
const OPENAPI_URL =
|
||||||
|
"https://raw.githubusercontent.com/tryAGI/Anthropic/1c0871e861de60a4c3a843cb90e17d63e86c234a/docs/openapi.yaml";
|
||||||
|
|
||||||
|
// Fetch the openapi document
|
||||||
|
const response = await fetch(OPENAPI_URL);
|
||||||
|
const openApiYaml = await response.text();
|
||||||
|
|
||||||
|
// Parse the yaml document
|
||||||
|
let schema = YAML.parse(openApiYaml) as JSONObject;
|
||||||
|
schema = openapiSchemaToJsonSchema(schema);
|
||||||
|
|
||||||
|
const jsonSchema = await $RefParser.dereference(schema);
|
||||||
|
|
||||||
|
assert("components" in jsonSchema);
|
||||||
|
const completionRequestSchema = jsonSchema.components.schemas
|
||||||
|
.CreateCompletionRequest as JSONSchema4Object;
|
||||||
|
|
||||||
|
// We need to do a bit of surgery here since the Monaco editor doesn't like
|
||||||
|
// the fact that the schema says `model` can be either a string or an enum,
|
||||||
|
// and displays a warning in the editor. Let's stick with just an enum for
|
||||||
|
// now and drop the string option.
|
||||||
|
assert(
|
||||||
|
"properties" in completionRequestSchema &&
|
||||||
|
isObject(completionRequestSchema.properties) &&
|
||||||
|
"model" in completionRequestSchema.properties &&
|
||||||
|
isObject(completionRequestSchema.properties.model),
|
||||||
|
);
|
||||||
|
|
||||||
|
const modelProperty = completionRequestSchema.properties.model;
|
||||||
|
assert(
|
||||||
|
"oneOf" in modelProperty &&
|
||||||
|
Array.isArray(modelProperty.oneOf) &&
|
||||||
|
modelProperty.oneOf.length === 2 &&
|
||||||
|
isObject(modelProperty.oneOf[1]) &&
|
||||||
|
"enum" in modelProperty.oneOf[1],
|
||||||
|
"Expected model to have oneOf length of 2",
|
||||||
|
);
|
||||||
|
modelProperty.type = "string";
|
||||||
|
modelProperty.enum = modelProperty.oneOf[1].enum;
|
||||||
|
delete modelProperty["oneOf"];
|
||||||
|
|
||||||
|
// Get the directory of the current script
|
||||||
|
const currentDirectory = path.dirname(import.meta.url).replace("file://", "");
|
||||||
|
|
||||||
|
// Write the JSON schema to a file in the current directory
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(currentDirectory, "input.schema.json"),
|
||||||
|
await prettier.format(JSON.stringify(completionRequestSchema, null, 2), {
|
||||||
|
parser: "json",
|
||||||
|
plugins: [parserBabel, parserEstree],
|
||||||
|
}),
|
||||||
|
);
|
||||||
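To make the "surgery" described in the codegen script concrete, here is a minimal sketch of the oneOf-to-enum rewrite it performs on the model property. The schema fragment below is hand-written to resemble the published document and is illustrative only:

// Hypothetical fragment shaped like the fetched schema, for illustration only.
const modelProperty: Record<string, unknown> = {
  oneOf: [
    { type: "string" },
    { type: "string", enum: ["claude-2", "claude-2.0", "claude-instant-1", "claude-instant-1.1"] },
  ],
};

// The same transformation codegen.ts applies: collapse the string-or-enum
// union into a plain enum so the Monaco editor stops warning about it.
const enumBranch = (modelProperty.oneOf as { enum?: string[] }[])[1];
modelProperty.type = "string";
modelProperty.enum = enumBranch?.enum;
delete modelProperty.oneOf;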
129
src/modelProviders/anthropic/codegen/input.schema.json
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
{
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"model": {
|
||||||
|
"description": "The model that will complete your prompt.\nAs we improve Claude, we develop new versions of it that you can query.\nThis parameter controls which version of Claude answers your request.\nRight now we are offering two model families: Claude, and Claude Instant.\nYou can use them by setting model to \"claude-2\" or \"claude-instant-1\", respectively.\nSee models for additional details.\n",
|
||||||
|
"x-oaiTypeLabel": "string",
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"claude-2",
|
||||||
|
"claude-2.0",
|
||||||
|
"claude-instant-1",
|
||||||
|
"claude-instant-1.1"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"prompt": {
|
||||||
|
"description": "The prompt that you want Claude to complete.\n\nFor proper response generation you will need to format your prompt as follows:\n\\n\\nHuman: ${userQuestion}\\n\\nAssistant:\nSee our comments on prompts for more context.\n",
|
||||||
|
"default": "<|endoftext|>",
|
||||||
|
"nullable": true,
|
||||||
|
"oneOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"default": "",
|
||||||
|
"example": "This is a test."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "string",
|
||||||
|
"default": "",
|
||||||
|
"example": "This is a test."
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"items": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"example": "[1212, 318, 257, 1332, 13]"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"items": {
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"items": {
|
||||||
|
"type": "integer"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"example": "[[1212, 318, 257, 1332, 13]]"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"max_tokens_to_sample": {
|
||||||
|
"type": "integer",
|
||||||
|
"minimum": 1,
|
||||||
|
"default": 256,
|
||||||
|
"example": 256,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "The maximum number of tokens to generate before stopping.\n\nNote that our models may stop before reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate.\n"
|
||||||
|
},
|
||||||
|
"temperature": {
|
||||||
|
"type": "number",
|
||||||
|
"minimum": 0,
|
||||||
|
"maximum": 1,
|
||||||
|
"default": 1,
|
||||||
|
"example": 1,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "Amount of randomness injected into the response.\n\nDefaults to 1. Ranges from 0 to 1. Use temp closer to 0 for analytical / multiple choice, and closer to 1 for creative and generative tasks.\n"
|
||||||
|
},
|
||||||
|
"top_p": {
|
||||||
|
"type": "number",
|
||||||
|
"minimum": 0,
|
||||||
|
"maximum": 1,
|
||||||
|
"default": 1,
|
||||||
|
"example": 1,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "Use nucleus sampling.\n\nIn nucleus sampling, we compute the cumulative distribution over all the options \nfor each subsequent token in decreasing probability order and cut it off once \nit reaches a particular probability specified by top_p. You should either alter temperature or top_p, but not both.\n"
|
||||||
|
},
|
||||||
|
"top_k": {
|
||||||
|
"type": "number",
|
||||||
|
"minimum": 0,
|
||||||
|
"default": 5,
|
||||||
|
"example": 5,
|
||||||
|
"nullable": true,
|
||||||
|
"description": "Only sample from the top K options for each subsequent token.\n\nUsed to remove \"long tail\" low probability responses. Learn more technical details here.\n"
|
||||||
|
},
|
||||||
|
"stream": {
|
||||||
|
"description": "Whether to incrementally stream the response using server-sent events.\nSee this guide to SSE events for details.type: boolean\n",
|
||||||
|
"nullable": true,
|
||||||
|
"default": false
|
||||||
|
},
|
||||||
|
"stop_sequences": {
|
||||||
|
"description": "Sequences that will cause the model to stop generating completion text.\nOur models stop on \"\\n\\nHuman:\", and may include additional built-in stop sequences in the future. By providing the stop_sequences parameter, you may include additional strings that will cause the model to stop generating.\n",
|
||||||
|
"default": null,
|
||||||
|
"nullable": true,
|
||||||
|
"oneOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"default": "<|endoftext|>",
|
||||||
|
"example": "\n",
|
||||||
|
"nullable": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"maxItems": 4,
|
||||||
|
"items": {
|
||||||
|
"type": "string",
|
||||||
|
"example": "[\"\\n\"]"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"metadata": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"user_id": {
|
||||||
|
"type": "string",
|
||||||
|
"example": "13803d75-b4b5-4c3e-b2a2-6f21399b021b",
|
||||||
|
"description": "An external identifier for the user who is associated with the request.\n\nThis should be a uuid, hash value, or other opaque identifier. Anthropic may use this id to help detect abuse. \nDo not include any identifying information such as name, email address, or phone number.\n"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "An object describing metadata about the request.\n"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["model", "prompt", "max_tokens_to_sample"]
|
||||||
|
}
|
||||||
40
src/modelProviders/anthropic/frontend.ts
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
import { type Completion } from "@anthropic-ai/sdk/resources";
|
||||||
|
import { type SupportedModel } from ".";
|
||||||
|
import { type FrontendModelProvider } from "../types";
|
||||||
|
import { refinementActions } from "./refinementActions";
|
||||||
|
|
||||||
|
const frontendModelProvider: FrontendModelProvider<SupportedModel, Completion> = {
|
||||||
|
name: "Replicate Llama2",
|
||||||
|
|
||||||
|
models: {
|
||||||
|
"claude-2.0": {
|
||||||
|
name: "Claude 2.0",
|
||||||
|
contextWindow: 100000,
|
||||||
|
promptTokenPrice: 11.02 / 1000000,
|
||||||
|
completionTokenPrice: 32.68 / 1000000,
|
||||||
|
speed: "medium",
|
||||||
|
provider: "anthropic",
|
||||||
|
learnMoreUrl: "https://www.anthropic.com/product",
|
||||||
|
},
|
||||||
|
"claude-instant-1.1": {
|
||||||
|
name: "Claude Instant 1.1",
|
||||||
|
contextWindow: 100000,
|
||||||
|
promptTokenPrice: 1.63 / 1000000,
|
||||||
|
completionTokenPrice: 5.51 / 1000000,
|
||||||
|
speed: "fast",
|
||||||
|
provider: "anthropic",
|
||||||
|
learnMoreUrl: "https://www.anthropic.com/product",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
refinementActions,
|
||||||
|
|
||||||
|
normalizeOutput: (output) => {
|
||||||
|
return {
|
||||||
|
type: "text",
|
||||||
|
value: output.completion,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
export default frontendModelProvider;
|
||||||
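A quick sketch of how the normalizeOutput mapping above behaves. The Completion object is a hand-written stand-in, not real API output:

import frontendModelProvider from "./frontend";
import { type Completion } from "@anthropic-ai/sdk/resources";

// Hand-written stand-in for an API response; values are illustrative.
const completion = {
  completion: " Paris is the capital of France.",
  model: "claude-2.0",
  stop_reason: "stop_sequence",
} as Completion;

// normalizeOutput flattens the provider-specific shape into { type, value }.
const normalized = frontendModelProvider.normalizeOutput(completion);
// => { type: "text", value: " Paris is the capital of France." }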
86
src/modelProviders/anthropic/getCompletion.ts
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
import { env } from "~/env.mjs";
|
||||||
|
import { type CompletionResponse } from "../types";
|
||||||
|
|
||||||
|
import Anthropic, { APIError } from "@anthropic-ai/sdk";
|
||||||
|
import { type Completion, type CompletionCreateParams } from "@anthropic-ai/sdk/resources";
|
||||||
|
import { isObject, isString } from "lodash-es";
|
||||||
|
|
||||||
|
const anthropic = new Anthropic({
|
||||||
|
apiKey: env.ANTHROPIC_API_KEY,
|
||||||
|
});
|
||||||
|
|
||||||
|
export async function getCompletion(
|
||||||
|
input: CompletionCreateParams,
|
||||||
|
onStream: ((partialOutput: Completion) => void) | null,
|
||||||
|
): Promise<CompletionResponse<Completion>> {
|
||||||
|
const start = Date.now();
|
||||||
|
let finalCompletion: Completion | null = null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (onStream) {
|
||||||
|
const resp = await anthropic.completions.create(
|
||||||
|
{ ...input, stream: true },
|
||||||
|
{
|
||||||
|
maxRetries: 0,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
for await (const part of resp) {
|
||||||
|
if (finalCompletion === null) {
|
||||||
|
finalCompletion = part;
|
||||||
|
} else {
|
||||||
|
finalCompletion = { ...part, completion: finalCompletion.completion + part.completion };
|
||||||
|
}
|
||||||
|
onStream(finalCompletion);
|
||||||
|
}
|
||||||
|
if (!finalCompletion) {
|
||||||
|
return {
|
||||||
|
type: "error",
|
||||||
|
message: "Streaming failed to return a completion",
|
||||||
|
autoRetry: false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const resp = await anthropic.completions.create(
|
||||||
|
{ ...input, stream: false },
|
||||||
|
{
|
||||||
|
maxRetries: 0,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
finalCompletion = resp;
|
||||||
|
}
|
||||||
|
const timeToComplete = Date.now() - start;
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: "success",
|
||||||
|
statusCode: 200,
|
||||||
|
value: finalCompletion,
|
||||||
|
timeToComplete,
|
||||||
|
};
|
||||||
|
} catch (error: unknown) {
|
||||||
|
console.log("CAUGHT ERROR", error);
|
||||||
|
if (error instanceof APIError) {
|
||||||
|
const message =
|
||||||
|
isObject(error.error) &&
|
||||||
|
"error" in error.error &&
|
||||||
|
isObject(error.error.error) &&
|
||||||
|
"message" in error.error.error &&
|
||||||
|
isString(error.error.error.message)
|
||||||
|
? error.error.error.message
|
||||||
|
: error.message;
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: "error",
|
||||||
|
message,
|
||||||
|
autoRetry: error.status === 429 || error.status === 503,
|
||||||
|
statusCode: error.status,
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
return {
|
||||||
|
type: "error",
|
||||||
|
message: (error as Error).message,
|
||||||
|
autoRetry: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
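As a rough usage sketch of the getCompletion helper above: the prompt text and token budget are made up, and this mirrors how a caller is expected to invoke the provider rather than being a verbatim snippet from the repo:

import { getCompletion } from "./getCompletion";

const response = await getCompletion(
  {
    model: "claude-instant-1.1",
    prompt: "\n\nHuman: Say hello in one word.\n\nAssistant:",
    max_tokens_to_sample: 32,
  },
  // The streaming callback receives the accumulated completion so far.
  (partial) => console.log(partial.completion),
);

if (response.type === "success") {
  console.log(response.value.completion, `${response.timeToComplete}ms`);
} else {
  console.error(response.message, response.autoRetry ? "(will retry)" : "");
}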
34
src/modelProviders/anthropic/index.ts
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
import { type JSONSchema4 } from "json-schema";
|
||||||
|
import { type ModelProvider } from "../types";
|
||||||
|
import inputSchema from "./codegen/input.schema.json";
|
||||||
|
import { getCompletion } from "./getCompletion";
|
||||||
|
import frontendModelProvider from "./frontend";
|
||||||
|
import type { Completion, CompletionCreateParams } from "@anthropic-ai/sdk/resources";
|
||||||
|
|
||||||
|
const supportedModels = ["claude-2.0", "claude-instant-1.1"] as const;
|
||||||
|
|
||||||
|
export type SupportedModel = (typeof supportedModels)[number];
|
||||||
|
|
||||||
|
export type AnthropicProvider = ModelProvider<SupportedModel, CompletionCreateParams, Completion>;
|
||||||
|
|
||||||
|
const modelProvider: AnthropicProvider = {
|
||||||
|
getModel: (input) => {
|
||||||
|
if (supportedModels.includes(input.model as SupportedModel))
|
||||||
|
return input.model as SupportedModel;
|
||||||
|
|
||||||
|
const modelMaps: Record<string, SupportedModel> = {
|
||||||
|
"claude-2": "claude-2.0",
|
||||||
|
"claude-instant-1": "claude-instant-1.1",
|
||||||
|
};
|
||||||
|
|
||||||
|
if (input.model in modelMaps) return modelMaps[input.model] as SupportedModel;
|
||||||
|
|
||||||
|
return null;
|
||||||
|
},
|
||||||
|
inputSchema: inputSchema as JSONSchema4,
|
||||||
|
canStream: true,
|
||||||
|
getCompletion,
|
||||||
|
...frontendModelProvider,
|
||||||
|
};
|
||||||
|
|
||||||
|
export default modelProvider;
|
||||||
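For clarity, getModel above maps Anthropic's unversioned aliases onto the pinned model names. A small illustrative check, with the input object trimmed to the required fields and the import path assuming the project's "~" alias:

import anthropic from "~/modelProviders/anthropic";
import { type CompletionCreateParams } from "@anthropic-ai/sdk/resources";

// "claude-2" is not a pinned model name, so getModel falls through to the alias map.
const input = { model: "claude-2", prompt: "", max_tokens_to_sample: 1 } as CompletionCreateParams;

console.log(anthropic.getModel(input)); // "claude-2.0"
console.log(anthropic.getModel({ ...input, model: "gpt-4" })); // null (unsupported)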
3
src/modelProviders/anthropic/refinementActions.ts
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
import { type RefinementAction } from "../types";
|
||||||
|
|
||||||
|
export const refinementActions: Record<string, RefinementAction> = {};
|
||||||
@@ -1,15 +1,15 @@
|
|||||||
import openaiChatCompletionFrontend from "./openai-ChatCompletion/frontend";
|
import openaiChatCompletionFrontend from "./openai-ChatCompletion/frontend";
|
||||||
import replicateLlama2Frontend from "./replicate-llama2/frontend";
|
import replicateLlama2Frontend from "./replicate-llama2/frontend";
|
||||||
|
import anthropicFrontend from "./anthropic/frontend";
|
||||||
import { type SupportedProvider, type FrontendModelProvider } from "./types";
|
import { type SupportedProvider, type FrontendModelProvider } from "./types";
|
||||||
|
|
||||||
// TODO: make sure we get a typescript error if you forget to add a provider here
|
|
||||||
|
|
||||||
// Keep attributes here that need to be accessible from the frontend. We can't
|
// Keep attributes here that need to be accessible from the frontend. We can't
|
||||||
// just include them in the default `modelProviders` object because it has some
|
// just include them in the default `modelProviders` object because it has some
|
||||||
// transient dependencies that can only be imported on the server.
|
// transient dependencies that can only be imported on the server.
|
||||||
const frontendModelProviders: Record<SupportedProvider, FrontendModelProvider<any, any>> = {
|
const frontendModelProviders: Record<SupportedProvider, FrontendModelProvider<any, any>> = {
|
||||||
"openai/ChatCompletion": openaiChatCompletionFrontend,
|
"openai/ChatCompletion": openaiChatCompletionFrontend,
|
||||||
"replicate/llama2": replicateLlama2Frontend,
|
"replicate/llama2": replicateLlama2Frontend,
|
||||||
|
anthropic: anthropicFrontend,
|
||||||
};
|
};
|
||||||
|
|
||||||
export default frontendModelProviders;
|
export default frontendModelProviders;
|
||||||
|
|||||||
@@ -1,10 +1,12 @@
|
|||||||
import openaiChatCompletion from "./openai-ChatCompletion";
|
import openaiChatCompletion from "./openai-ChatCompletion";
|
||||||
import replicateLlama2 from "./replicate-llama2";
|
import replicateLlama2 from "./replicate-llama2";
|
||||||
|
import anthropic from "./anthropic";
|
||||||
import { type SupportedProvider, type ModelProvider } from "./types";
|
import { type SupportedProvider, type ModelProvider } from "./types";
|
||||||
|
|
||||||
const modelProviders: Record<SupportedProvider, ModelProvider<any, any, any>> = {
|
const modelProviders: Record<SupportedProvider, ModelProvider<any, any, any>> = {
|
||||||
"openai/ChatCompletion": openaiChatCompletion,
|
"openai/ChatCompletion": openaiChatCompletion,
|
||||||
"replicate/llama2": replicateLlama2,
|
"replicate/llama2": replicateLlama2,
|
||||||
|
anthropic,
|
||||||
};
|
};
|
||||||
|
|
||||||
export default modelProviders;
|
export default modelProviders;
|
||||||
|
|||||||
@@ -120,7 +120,6 @@ export async function getCompletion(
|
|||||||
cost,
|
cost,
|
||||||
};
|
};
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
console.error("ERROR IS", error);
|
|
||||||
if (error instanceof APIError) {
|
if (error instanceof APIError) {
|
||||||
return {
|
return {
|
||||||
type: "error",
|
type: "error",
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ const replicate = new Replicate({
|
|||||||
const modelIds: Record<ReplicateLlama2Input["model"], string> = {
|
const modelIds: Record<ReplicateLlama2Input["model"], string> = {
|
||||||
"7b-chat": "5ec5fdadd80ace49f5a2b2178cceeb9f2f77c493b85b1131002c26e6b2b13184",
|
"7b-chat": "5ec5fdadd80ace49f5a2b2178cceeb9f2f77c493b85b1131002c26e6b2b13184",
|
||||||
"13b-chat": "6b4da803a2382c08868c5af10a523892f38e2de1aafb2ee55b020d9efef2fdb8",
|
"13b-chat": "6b4da803a2382c08868c5af10a523892f38e2de1aafb2ee55b020d9efef2fdb8",
|
||||||
"70b-chat": "2d19859030ff705a87c746f7e96eea03aefb71f166725aee39692f1476566d48",
|
"70b-chat": "2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1",
|
||||||
};
|
};
|
||||||
|
|
||||||
export async function getCompletion(
|
export async function getCompletion(
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import { z } from "zod";
|
|||||||
export const ZodSupportedProvider = z.union([
|
export const ZodSupportedProvider = z.union([
|
||||||
z.literal("openai/ChatCompletion"),
|
z.literal("openai/ChatCompletion"),
|
||||||
z.literal("replicate/llama2"),
|
z.literal("replicate/llama2"),
|
||||||
|
z.literal("anthropic"),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
export type SupportedProvider = z.infer<typeof ZodSupportedProvider>;
|
export type SupportedProvider = z.infer<typeof ZodSupportedProvider>;
|
||||||
|
|||||||
@@ -21,6 +21,17 @@ const MyApp: AppType<{ session: Session | null }> = ({
|
|||||||
name="viewport"
|
name="viewport"
|
||||||
content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=0"
|
content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=0"
|
||||||
/>
|
/>
|
||||||
|
<meta name="og:title" content="OpenPipe: Open-Source Lab for LLMs" key="title" />
|
||||||
|
<meta
|
||||||
|
name="og:description"
|
||||||
|
content="OpenPipe is a powerful playground for quickly optimizing performance, cost, and speed across models."
|
||||||
|
key="description"
|
||||||
|
/>
|
||||||
|
<meta name="og:image" content="/og.png" key="og-image" />
|
||||||
|
<meta property="og:image:height" content="630" />
|
||||||
|
<meta property="og:image:width" content="1200" />
|
||||||
|
<meta name="twitter:card" content="summary_large_image" />
|
||||||
|
<meta name="twitter:image" content="/og.png" />
|
||||||
</Head>
|
</Head>
|
||||||
<SessionProvider session={session}>
|
<SessionProvider session={session}>
|
||||||
<SyncAppStore />
|
<SyncAppStore />
|
||||||
|
|||||||
81
src/pages/api/experiments/og-image.tsx
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
import { ImageResponse } from "@vercel/og";
|
||||||
|
import { type NextApiRequest, type NextApiResponse } from "next";
|
||||||
|
|
||||||
|
export const config = {
|
||||||
|
runtime: "experimental-edge",
|
||||||
|
};
|
||||||
|
|
||||||
|
const inconsolataRegularFontP = fetch(
|
||||||
|
new URL("../../../../public/fonts/Inconsolata_SemiExpanded-Medium.ttf", import.meta.url),
|
||||||
|
).then((res) => res.arrayBuffer());
|
||||||
|
|
||||||
|
const OgImage = async (req: NextApiRequest, res: NextApiResponse) => {
|
||||||
|
// @ts-expect-error - nextUrl is not defined on NextApiRequest for some reason
|
||||||
|
const searchParams = req.nextUrl?.searchParams as URLSearchParams;
|
||||||
|
const experimentLabel = searchParams.get("experimentLabel");
|
||||||
|
const variantsCount = searchParams.get("variantsCount");
|
||||||
|
const scenariosCount = searchParams.get("scenariosCount");
|
||||||
|
|
||||||
|
const inconsolataRegularFont = await inconsolataRegularFontP;
|
||||||
|
|
||||||
|
return new ImageResponse(
|
||||||
|
(
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
width: "100%",
|
||||||
|
height: "100%",
|
||||||
|
display: "flex",
|
||||||
|
flexDirection: "column",
|
||||||
|
alignItems: "center",
|
||||||
|
justifyContent: "center",
|
||||||
|
fontSize: 48,
|
||||||
|
padding: "48px",
|
||||||
|
background: "white",
|
||||||
|
position: "relative",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
position: "absolute",
|
||||||
|
top: 0,
|
||||||
|
left: 0,
|
||||||
|
display: "flex",
|
||||||
|
alignItems: "center",
|
||||||
|
padding: 48,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{/* eslint-disable-next-line @next/next/no-img-element */}
|
||||||
|
<img
|
||||||
|
src="https://app.openpipe.ai/logo.svg"
|
||||||
|
alt="OpenPipe Logo"
|
||||||
|
height={100}
|
||||||
|
width={120}
|
||||||
|
/>
|
||||||
|
<div style={{ marginLeft: 24, fontSize: 64, fontFamily: "Inconsolata" }}>OpenPipe</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div style={{ display: "flex", fontSize: 72, marginTop: 108 }}>{experimentLabel}</div>
|
||||||
|
<div style={{ display: "flex", flexDirection: "column", marginTop: 36 }}>
|
||||||
|
<div style={{ display: "flex" }}>
|
||||||
|
<span style={{ width: 320 }}>Variants:</span> {variantsCount}
|
||||||
|
</div>
|
||||||
|
<div style={{ display: "flex", marginTop: 24 }}>
|
||||||
|
<span style={{ width: 320 }}>Scenarios:</span> {scenariosCount}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
),
|
||||||
|
{
|
||||||
|
fonts: [
|
||||||
|
{
|
||||||
|
name: "inconsolata",
|
||||||
|
data: inconsolataRegularFont,
|
||||||
|
style: "normal",
|
||||||
|
weight: 400,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default OgImage;
|
||||||
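The route above builds the whole card from three query-string parameters, so any page that wants a custom card only has to point its og:image tag at a URL of that shape (the experiment page further down does exactly this). A minimal sketch of such a caller, assuming a hypothetical helper name that is not part of this change:

    // Hypothetical helper; mirrors the URL the experiment page constructs below.
    const ogImageUrl = (experimentLabel: string, variantsCount: number, scenariosCount: number) =>
      `/api/experiments/og-image?experimentLabel=${experimentLabel}` +
      `&variantsCount=${variantsCount}&scenariosCount=${scenariosCount}`;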
@@ -22,13 +22,42 @@ import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
 import { useAppStore } from "~/state/store";
 import { useSyncVariantEditor } from "~/state/sync";
 import { HeaderButtons } from "~/components/experiments/HeaderButtons/HeaderButtons";
+import Head from "next/head";
+
+// TODO: import less to fix deployment with server side props
+// export const getServerSideProps = async (context: GetServerSidePropsContext<{ id: string }>) => {
+//   const experimentId = context.params?.id as string;
+
+//   const helpers = createServerSideHelpers({
+//     router: appRouter,
+//     ctx: createInnerTRPCContext({ session: null }),
+//     transformer: superjson, // optional - adds superjson serialization
+//   });
+
+//   // prefetch query
+//   await helpers.experiments.stats.prefetch({ id: experimentId });
+
+//   return {
+//     props: {
+//       trpcState: helpers.dehydrate(),
+//     },
+//   };
+// };
+
 export default function Experiment() {
   const router = useRouter();
-  const experiment = useExperiment();
   const utils = api.useContext();
   useSyncVariantEditor();
+
+  const experiment = useExperiment();
+  const experimentStats = api.experiments.stats.useQuery(
+    { id: router.query.id as string },
+    {
+      enabled: !!router.query.id,
+    },
+  );
+  const stats = experimentStats.data;
+
   useEffect(() => {
     useAppStore.getState().sharedVariantEditor.loadMonaco().catch(console.error);
   });
@@ -62,6 +91,17 @@ export default function Experiment() {
   const canModify = experiment.data?.access.canModify ?? false;

   return (
+    <>
+      {stats && (
+        <Head>
+          <meta property="og:title" content={stats.experimentLabel} key="title" />
+          <meta
+            property="og:image"
+            content={`/api/experiments/og-image?experimentLabel=${stats.experimentLabel}&variantsCount=${stats.promptVariantCount}&scenariosCount=${stats.testScenarioCount}`}
+            key="og-image"
+          />
+        </Head>
+      )}
     <AppShell title={experiment.data?.label}>
       <VStack h="full">
         <Flex
@@ -110,5 +150,6 @@ export default function Experiment() {
         </Box>
       </VStack>
     </AppShell>
+    </>
   );
 }
15 src/pages/world-champs/index.tsx Normal file
@@ -0,0 +1,15 @@
+import { type GetServerSideProps } from "next";
+
+// eslint-disable-next-line @typescript-eslint/require-await
+export const getServerSideProps: GetServerSideProps = async () => {
+  return {
+    redirect: {
+      destination: "/world-champs/signup",
+      permanent: false,
+    },
+  };
+};
+
+export default function WorldChamps() {
+  return null;
+}
201 src/pages/world-champs/signup.tsx Normal file
@@ -0,0 +1,201 @@
+import {
+  Box,
+  type BoxProps,
+  Button,
+  DarkMode,
+  GlobalStyle,
+  HStack,
+  Heading,
+  Icon,
+  Link,
+  Table,
+  Tbody,
+  Td,
+  Text,
+  type TextProps,
+  Th,
+  Tr,
+  VStack,
+  useInterval,
+} from "@chakra-ui/react";
+import { signIn, useSession } from "next-auth/react";
+import Head from "next/head";
+import { useCallback, useState } from "react";
+import { BsGithub } from "react-icons/bs";
+import UserMenu from "~/components/nav/UserMenu";
+import { api } from "~/utils/api";
+import dayjs from "~/utils/dayjs";
+import { useHandledAsyncCallback } from "~/utils/hooks";
+
+// Shows how long until the competition starts. Refreshes every second
+function CountdownTimer(props: { date: Date } & TextProps) {
+  const [now, setNow] = useState(dayjs(0));
+
+  useInterval(() => {
+    setNow(dayjs());
+  }, 1000);
+
+  const { date, ...rest } = props;
+
+  const kickoff = dayjs(props.date);
+  const diff = kickoff.diff(now, "second");
+  const days = Math.floor(diff / 86400);
+  const hours = Math.floor((diff % 86400) / 3600);
+  const minutes = Math.floor((diff % 3600) / 60);
+  const seconds = Math.floor(diff % 60);
+
+  return (
+    <Text {...rest}>
+      <Text as="span" fontWeight="bold">
+        Kickoff in
+      </Text>{" "}
+      {days}d {hours}h {minutes}m {seconds}s
+    </Text>
+  );
+}
+
+function ApplicationStatus(props: BoxProps) {
+  const user = useSession().data;
+  const entrant = api.worldChamps.userStatus.useQuery().data;
+  const applyMutation = api.worldChamps.apply.useMutation();
+
+  const utils = api.useContext();
+
+  const [onSignIn] = useHandledAsyncCallback(async () => {
+    await signIn("github");
+  }, []);
+
+  const [onApply] = useHandledAsyncCallback(async () => {
+    await applyMutation.mutateAsync();
+    await utils.worldChamps.userStatus.invalidate();
+  }, []);
+
+  const Wrapper = useCallback(
+    (wrapperProps: BoxProps) => (
+      <Box {...props} {...wrapperProps} minH="120px" alignItems="center" justifyItems="center" />
+    ),
+    [props],
+  );
+
+  if (user === null) {
+    return (
+      <Wrapper>
+        <Button onClick={onSignIn} colorScheme="orange" leftIcon={<Icon as={BsGithub} />}>
+          Connect GitHub to apply
+        </Button>
+      </Wrapper>
+    );
+  } else if (user) {
+    return (
+      <Wrapper>
+        <HStack spacing={8}>
+          <UserMenu user={user} borderRadius={2} borderColor={"gray.700"} borderWidth={1} pr={6} />
+          <Box flex={1}>
+            {entrant?.approved ? (
+              <Text fontSize="sm">
+                You're accepted! We'll send you more details before August 14th.
+              </Text>
+            ) : entrant ? (
+              <Text fontSize="sm">
+                Application submitted successfully! We'll notify you by email before August 14th.
+              </Text>
+            ) : (
+              <Button onClick={onApply} colorScheme="orange">
+                Apply to compete
+              </Button>
+            )}
+          </Box>
+        </HStack>
+      </Wrapper>
+    );
+  }
+
+  return <Wrapper />;
+}
+
+export default function Signup() {
+  return (
+    <DarkMode>
+      <GlobalStyle />
+
+      <Head>
+        <title>🏆 Prompt Engineering World Championships</title>
+        <meta property="og:title" content="🏆 Prompt Engineering World Championships" key="title" />
+        <meta
+          property="og:description"
+          content="Think you have what it takes to be the best? Compete with the world's top prompt engineers and see where you rank!"
+          key="description"
+        />
+      </Head>
+
+      <Box bgColor="gray.900" color="gray.200" minH="100vh" w="full">
+        <VStack mx="auto" py={24} maxW="2xl" align="start" fontSize="lg">
+          <Heading size="lg">🏆 Prompt Engineering World Championships</Heading>
+          <CountdownTimer
+            date={new Date("2023-08-14T00:00:00Z")}
+            fontSize="2xl"
+            alignSelf="center"
+            color="gray.500"
+          />

+          <ApplicationStatus py={8} alignSelf="center" />
+
+          <Text fontSize="lg">
+            Think you have what it takes to be the best? Compete with the world's top prompt
+            engineers and see where you rank!
+          </Text>
+
+          <Heading size="lg" pt={12} alignSelf="left">
+            Event Details
+          </Heading>
+          <Table variant="simple">
+            <Tbody>
+              <Tr>
+                <Th>Kickoff</Th>
+                <Td>August 14</Td>
+              </Tr>
+              <Tr>
+                <Th>Prize</Th>
+                <Td>$15,000 grand prize + smaller category prizes.</Td>
+              </Tr>
+              <Tr>
+                <Th>Events</Th>
+                <Td>
+                  Optimize prompts for multiple tasks selected from academic benchmarks and
+                  real-world applications.
+                </Td>
+              </Tr>
+              <Tr>
+                <Th>Models</Th>
+                <Td>Separate "weight classes" for GPT 3.5, Claude Instant, and Llama 2.</Td>
+              </Tr>
+              <Tr>
+                <Th>Qualifications</Th>
+                <Td>Open to entrants with any level of experience.</Td>
+              </Tr>
+              <Tr>
+                <Th>Certificates</Th>
+                <Td>Certificate of mastery for all qualifying participants.</Td>
+              </Tr>
+              <Tr>
+                <Th>Cost</Th>
+                <Td>
+                  <strong>Free</strong>. We'll cover your inference budget.
+                </Td>
+              </Tr>
+              <Tr>
+                <Th>Questions?</Th>
+                <Td>
+                  <Link href="mailto:world-champs@openpipe.ai" textDecor="underline">
+                    Email us
+                  </Link>{" "}
+                  with any follow-up questions!
+                </Td>
+              </Tr>
+            </Tbody>
+          </Table>
+        </VStack>
+      </Box>
+    </DarkMode>
+  );
+}
@@ -5,6 +5,7 @@ import { scenariosRouter } from "./routers/scenarios.router";
 import { scenarioVariantCellsRouter } from "./routers/scenarioVariantCells.router";
 import { templateVarsRouter } from "./routers/templateVariables.router";
 import { evaluationsRouter } from "./routers/evaluations.router";
+import { worldChampsRouter } from "./routers/worldChamps.router";

 /**
  * This is the primary router for your server.
@@ -18,6 +19,7 @@ export const appRouter = createTRPCRouter({
   scenarioVariantCells: scenarioVariantCellsRouter,
   templateVars: templateVarsRouter,
   evaluations: evaluationsRouter,
+  worldChamps: worldChampsRouter,
 });

 // export type definition of API
@@ -2,7 +2,7 @@ import { EvalType } from "@prisma/client";
 import { z } from "zod";
 import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
 import { prisma } from "~/server/db";
-import { runAllEvals } from "~/server/utils/evaluations";
+import { queueRunNewEval } from "~/server/tasks/runNewEval.task";
 import { requireCanModifyExperiment, requireCanViewExperiment } from "~/utils/accessControl";

 export const evaluationsRouter = createTRPCRouter({
@@ -40,9 +40,7 @@ export const evaluationsRouter = createTRPCRouter({
         },
       });

-      // TODO: this may be a bad UX for slow evals (eg. GPT-4 evals) Maybe need
-      // to kick off a background job or something instead
-      await runAllEvals(input.experimentId);
+      await queueRunNewEval(input.experimentId);
     }),

   update: protectedProcedure
@@ -78,7 +76,7 @@ export const evaluationsRouter = createTRPCRouter({
       });
       // Re-run all evals. Other eval results will already be cached, so this
       // should only re-run the updated one.
-      await runAllEvals(evaluation.experimentId);
+      await queueRunNewEval(experimentId);
     }),

   delete: protectedProcedure
@@ -15,6 +15,33 @@ import userOrg from "~/server/utils/userOrg";
 import generateTypes from "~/modelProviders/generateTypes";

 export const experimentsRouter = createTRPCRouter({
+  stats: publicProcedure.input(z.object({ id: z.string() })).query(async ({ input, ctx }) => {
+    await requireCanViewExperiment(input.id, ctx);
+
+    const [experiment, promptVariantCount, testScenarioCount] = await prisma.$transaction([
+      prisma.experiment.findFirstOrThrow({
+        where: { id: input.id },
+      }),
+      prisma.promptVariant.count({
+        where: {
+          experimentId: input.id,
+          visible: true,
+        },
+      }),
+      prisma.testScenario.count({
+        where: {
+          experimentId: input.id,
+          visible: true,
+        },
+      }),
+    ]);
+
+    return {
+      experimentLabel: experiment.label,
+      promptVariantCount,
+      testScenarioCount,
+    };
+  }),
   list: protectedProcedure.query(async ({ ctx }) => {
     // Anyone can list experiments
     requireNothing(ctx);
@@ -118,7 +145,7 @@ export const experimentsRouter = createTRPCRouter({
           },
         },
         include: {
-          modelOutput: {
+          modelResponses: {
             include: {
               outputEvaluations: true,
             },
@@ -177,11 +204,11 @@ export const experimentsRouter = createTRPCRouter({
     }

     const cellsToCreate: Prisma.ScenarioVariantCellCreateManyInput[] = [];
-    const modelOutputsToCreate: Prisma.ModelOutputCreateManyInput[] = [];
+    const modelResponsesToCreate: Prisma.ModelResponseCreateManyInput[] = [];
     const outputEvaluationsToCreate: Prisma.OutputEvaluationCreateManyInput[] = [];
     for (const cell of existingCells) {
       const newCellId = uuidv4();
-      const { modelOutput, ...cellData } = cell;
+      const { modelResponses, ...cellData } = cell;
       cellsToCreate.push({
         ...cellData,
         id: newCellId,
@@ -189,20 +216,20 @@ export const experimentsRouter = createTRPCRouter({
         testScenarioId: existingToNewScenarioIds.get(cell.testScenarioId) ?? "",
         prompt: (cell.prompt as Prisma.InputJsonValue) ?? undefined,
       });
-      if (modelOutput) {
-        const newModelOutputId = uuidv4();
-        const { outputEvaluations, ...modelOutputData } = modelOutput;
-        modelOutputsToCreate.push({
-          ...modelOutputData,
-          id: newModelOutputId,
+      for (const modelResponse of modelResponses) {
+        const newModelResponseId = uuidv4();
+        const { outputEvaluations, ...modelResponseData } = modelResponse;
+        modelResponsesToCreate.push({
+          ...modelResponseData,
+          id: newModelResponseId,
           scenarioVariantCellId: newCellId,
-          output: (modelOutput.output as Prisma.InputJsonValue) ?? undefined,
+          output: (modelResponse.output as Prisma.InputJsonValue) ?? undefined,
         });
         for (const evaluation of outputEvaluations) {
           outputEvaluationsToCreate.push({
             ...evaluation,
             id: uuidv4(),
-            modelOutputId: newModelOutputId,
+            modelResponseId: newModelResponseId,
             evaluationId: existingToNewEvaluationIds.get(evaluation.evaluationId) ?? "",
           });
         }
@@ -245,8 +272,8 @@ export const experimentsRouter = createTRPCRouter({
       prisma.scenarioVariantCell.createMany({
         data: cellsToCreate,
       }),
-      prisma.modelOutput.createMany({
-        data: modelOutputsToCreate,
+      prisma.modelResponse.createMany({
+        data: modelResponsesToCreate,
       }),
       prisma.evaluation.createMany({
        data: evaluationsToCreate,
@@ -1,6 +1,7 @@
 import { z } from "zod";
 import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
 import { prisma } from "~/server/db";
+import { Prisma } from "@prisma/client";
 import { generateNewCell } from "~/server/utils/generateNewCell";
 import userError from "~/server/utils/error";
 import { recordExperimentUpdated } from "~/server/utils/recordExperimentUpdated";
@@ -51,7 +52,9 @@ export const promptVariantsRouter = createTRPCRouter({
         id: true,
       },
       where: {
-        modelOutput: {
+        modelResponse: {
+          outdated: false,
+          output: { not: Prisma.AnyNull },
           scenarioVariantCell: {
             promptVariant: {
               id: input.variantId,
@@ -93,14 +96,23 @@ export const promptVariantsRouter = createTRPCRouter({
       where: {
         promptVariantId: input.variantId,
         testScenario: { visible: true },
-        modelOutput: {
-          is: {},
+        modelResponses: {
+          some: {
+            outdated: false,
+            output: {
+              not: Prisma.AnyNull,
+            },
+          },
         },
       },
     });

-    const overallTokens = await prisma.modelOutput.aggregate({
+    const overallTokens = await prisma.modelResponse.aggregate({
       where: {
+        outdated: false,
+        output: {
+          not: Prisma.AnyNull,
+        },
         scenarioVariantCell: {
           promptVariantId: input.variantId,
           testScenario: {
@@ -118,16 +130,9 @@ export const promptVariantsRouter = createTRPCRouter({
     const promptTokens = overallTokens._sum?.promptTokens ?? 0;
     const completionTokens = overallTokens._sum?.completionTokens ?? 0;

-    const awaitingRetrievals = !!(await prisma.scenarioVariantCell.findFirst({
-      where: {
-        promptVariantId: input.variantId,
-        testScenario: { visible: true },
-        // Check if is PENDING or IN_PROGRESS
-        retrievalStatus: {
-          in: ["PENDING", "IN_PROGRESS"],
-        },
-      },
-    }));
+    const awaitingEvals = !!evalResults.find(
+      (result) => result.totalCount < scenarioCount * evals.length,
+    );

     return {
       evalResults,
@@ -136,7 +141,7 @@ export const promptVariantsRouter = createTRPCRouter({
       overallCost: overallTokens._sum?.cost ?? 0,
       scenarioCount,
       outputCount,
-      awaitingRetrievals,
+      awaitingEvals,
     };
   }),
@@ -19,7 +19,8 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
     });
     await requireCanViewExperiment(experimentId, ctx);

-    return await prisma.scenarioVariantCell.findUnique({
+    const [cell, numTotalEvals] = await prisma.$transaction([
+      prisma.scenarioVariantCell.findUnique({
         where: {
           promptVariantId_testScenarioId: {
             promptVariantId: input.variantId,
@@ -27,7 +28,10 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
           },
         },
         include: {
-          modelOutput: {
+          modelResponses: {
+            where: {
+              outdated: false,
+            },
             include: {
               outputEvaluations: {
                 include: {
@@ -39,7 +43,21 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
             },
           },
         },
-    });
+      }),
+      prisma.evaluation.count({
+        where: { experimentId },
+      }),
+    ]);
+
+    if (!cell) return null;
+
+    const lastResponse = cell.modelResponses?.[cell.modelResponses?.length - 1];
+    const evalsComplete = lastResponse?.outputEvaluations?.length === numTotalEvals;
+
+    return {
+      ...cell,
+      evalsComplete,
+    };
   }),
   forceRefetch: protectedProcedure
     .input(
@@ -62,7 +80,6 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
           testScenarioId: input.scenarioId,
         },
       },
-      include: { modelOutput: true },
     });

     if (!cell) {
@@ -70,12 +87,12 @@ export const scenarioVariantCellsRouter = createTRPCRouter({
       return;
     }

-    if (cell.modelOutput) {
-      // TODO: Maybe keep these around to show previous generations?
-      await prisma.modelOutput.delete({
-        where: { id: cell.modelOutput.id },
-      });
-    }
+    await prisma.modelResponse.updateMany({
+      where: { scenarioVariantCellId: cell.id },
+      data: {
+        outdated: true,
+      },
+    });

     await queueQueryModel(cell.id, true);
   }),
@@ -41,7 +41,21 @@ export const scenariosRouter = createTRPCRouter({
       count,
     };
   }),
+  get: protectedProcedure.input(z.object({ id: z.string() })).query(async ({ input, ctx }) => {
+    const scenario = await prisma.testScenario.findUnique({
+      where: {
+        id: input.id,
+      },
+    });
+
+    if (!scenario) {
+      throw new Error(`Scenario with id ${input.id} does not exist`);
+    }
+
+    await requireCanViewExperiment(scenario.experimentId, ctx);
+
+    return scenario;
+  }),
   create: protectedProcedure
     .input(
       z.object({
36 src/server/api/routers/worldChamps.router.ts Normal file
@@ -0,0 +1,36 @@
+import { createTRPCRouter, protectedProcedure, publicProcedure } from "~/server/api/trpc";
+import { prisma } from "~/server/db";
+import { requireNothing } from "~/utils/accessControl";
+
+export const worldChampsRouter = createTRPCRouter({
+  userStatus: publicProcedure.query(async ({ input, ctx }) => {
+    const userId = ctx.session?.user.id;
+
+    if (!userId) {
+      return null;
+    }
+
+    return await prisma.worldChampEntrant.findUnique({
+      where: { userId },
+    });
+  }),
+
+  apply: protectedProcedure.mutation(async ({ ctx }) => {
+    const userId = ctx.session.user.id;
+    requireNothing(ctx);
+
+    const existingEntrant = await prisma.worldChampEntrant.findUnique({
+      where: { userId },
+    });
+
+    if (existingEntrant) {
+      return existingEntrant;
+    }
+
+    return await prisma.worldChampEntrant.create({
+      data: {
+        userId,
+      },
+    });
+  }),
+});
@@ -40,7 +40,7 @@ const noOp = () => {};
  *
  * @see https://create.t3.gg/en/usage/trpc#-serverapitrpcts
  */
-const createInnerTRPCContext = (opts: CreateContextOptions) => {
+export const createInnerTRPCContext = (opts: CreateContextOptions) => {
   return {
     session: opts.session,
     prisma,
12 src/server/scripts/studio-prod.sh Executable file
@@ -0,0 +1,12 @@
+#! /bin/bash
+
+set -e
+cd "$(dirname "$0")/../../.."
+
+
+set -o allexport
+source .env
+set +o allexport
+
+echo "Connecting to prod db"
+DATABASE_URL=$PROD_DATABASE_URL pnpm prisma studio
@@ -7,9 +7,9 @@ function defineTask<TPayload>(
   taskIdentifier: string,
   taskHandler: (payload: TPayload, helpers: Helpers) => Promise<void>,
 ) {
-  const enqueue = async (payload: TPayload) => {
+  const enqueue = async (payload: TPayload, runAt?: Date) => {
     console.log("Enqueuing task", taskIdentifier, payload);
-    await quickAddJob({ connectionString: env.DATABASE_URL }, taskIdentifier, payload);
+    await quickAddJob({ connectionString: env.DATABASE_URL }, taskIdentifier, payload, { runAt });
   };

   const handler = (payload: TPayload, helpers: Helpers) => {
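With the optional runAt argument, any task defined through this helper can now be scheduled for a later time instead of running immediately. A small sketch of the two call shapes, assuming the queryModel task and the retry variables from the task file below; nothing here is new API beyond what the diff adds:

    // Delayed: re-enqueue the same job once the backoff delay has elapsed.
    const retryTime = new Date(Date.now() + delay);
    await queryModel.enqueue({ cellId, stream, numPreviousTries: numPreviousTries + 1 }, retryTime);

    // Immediate: omitting the second argument keeps the old behavior and runs the job right away.
    await queryModel.enqueue({ cellId, stream, numPreviousTries: 0 });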
@@ -6,15 +6,15 @@ import { wsConnection } from "~/utils/wsConnection";
 import { runEvalsForOutput } from "../utils/evaluations";
 import hashPrompt from "../utils/hashPrompt";
 import parseConstructFn from "../utils/parseConstructFn";
-import { sleep } from "../utils/sleep";
 import defineTask from "./defineTask";

 export type QueryModelJob = {
   cellId: string;
   stream: boolean;
+  numPreviousTries: number;
 };

-const MAX_AUTO_RETRIES = 10;
+const MAX_AUTO_RETRIES = 50;
 const MIN_DELAY = 500; // milliseconds
 const MAX_DELAY = 15000; // milliseconds

@@ -26,20 +26,12 @@ function calculateDelay(numPreviousTries: number): number {

 export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) => {
   console.log("RUNNING TASK", task);
-  const { cellId, stream } = task;
+  const { cellId, stream, numPreviousTries } = task;
   const cell = await prisma.scenarioVariantCell.findUnique({
     where: { id: cellId },
-    include: { modelOutput: true },
+    include: { modelResponses: true },
   });
   if (!cell) {
-    await prisma.scenarioVariantCell.update({
-      where: { id: cellId },
-      data: {
-        statusCode: 404,
-        errorMessage: "Cell not found",
-        retrievalStatus: "ERROR",
-      },
-    });
     return;
   }

@@ -51,6 +43,7 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
     where: { id: cellId },
     data: {
       retrievalStatus: "IN_PROGRESS",
+      jobStartedAt: new Date(),
     },
   });

@@ -61,7 +54,6 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
     await prisma.scenarioVariantCell.update({
       where: { id: cellId },
       data: {
-        statusCode: 404,
         errorMessage: "Prompt Variant not found",
         retrievalStatus: "ERROR",
       },
@@ -76,7 +68,6 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
     await prisma.scenarioVariantCell.update({
       where: { id: cellId },
       data: {
-        statusCode: 404,
         errorMessage: "Scenario not found",
         retrievalStatus: "ERROR",
       },
@@ -90,7 +81,6 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
     await prisma.scenarioVariantCell.update({
       where: { id: cellId },
       data: {
-        statusCode: 400,
         errorMessage: prompt.error,
         retrievalStatus: "ERROR",
       },
@@ -106,17 +96,23 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
         }
       : null;

-  for (let i = 0; true; i++) {
-    const response = await provider.getCompletion(prompt.modelInput, onStream);
-    if (response.type === "success") {
-      const inputHash = hashPrompt(prompt);
-      const modelOutput = await prisma.modelOutput.create({
-        data: {
-          scenarioVariantCellId: cellId,
-          inputHash,
-          timeToComplete: response.timeToComplete,
+  const inputHash = hashPrompt(prompt);
+
+  let modelResponse = await prisma.modelResponse.create({
+    data: {
+      inputHash,
+      scenarioVariantCellId: cellId,
+      requestedAt: new Date(),
+    },
+  });
+  const response = await provider.getCompletion(prompt.modelInput, onStream);
+  if (response.type === "success") {
+    modelResponse = await prisma.modelResponse.update({
+      where: { id: modelResponse.id },
+      data: {
         output: response.value as Prisma.InputJsonObject,
+        statusCode: response.statusCode,
+        receivedAt: new Date(),
         promptTokens: response.promptTokens,
         completionTokens: response.completionTokens,
         cost: response.cost,
@@ -126,38 +122,53 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
     await prisma.scenarioVariantCell.update({
       where: { id: cellId },
       data: {
-        statusCode: response.statusCode,
         retrievalStatus: "COMPLETE",
       },
     });

-    await runEvalsForOutput(variant.experimentId, scenario, modelOutput);
-    break;
+    await runEvalsForOutput(variant.experimentId, scenario, modelResponse, prompt.modelProvider);
   } else {
-    const shouldRetry = response.autoRetry && i < MAX_AUTO_RETRIES;
-    const delay = calculateDelay(i);
+    const shouldRetry = response.autoRetry && numPreviousTries < MAX_AUTO_RETRIES;
+    const delay = calculateDelay(numPreviousTries);
+    const retryTime = new Date(Date.now() + delay);

-    await prisma.scenarioVariantCell.update({
-      where: { id: cellId },
+    await prisma.modelResponse.update({
+      where: { id: modelResponse.id },
       data: {
-        errorMessage: response.message,
         statusCode: response.statusCode,
-        retryTime: shouldRetry ? new Date(Date.now() + delay) : null,
-        retrievalStatus: "ERROR",
+        errorMessage: response.message,
+        receivedAt: new Date(),
+        retryTime: shouldRetry ? retryTime : null,
       },
     });

     if (shouldRetry) {
-      await sleep(delay);
+      await queryModel.enqueue(
+        {
+          cellId,
+          stream,
+          numPreviousTries: numPreviousTries + 1,
+        },
+        retryTime,
+      );
+      await prisma.scenarioVariantCell.update({
+        where: { id: cellId },
+        data: {
+          retrievalStatus: "PENDING",
+        },
+      });
     } else {
-      break;
-    }
+      await prisma.scenarioVariantCell.update({
+        where: { id: cellId },
+        data: {
+          retrievalStatus: "ERROR",
+        },
+      });
     }
   }
 });

 export const queueQueryModel = async (cellId: string, stream: boolean) => {
-  console.log("queueQueryModel", cellId, stream);
   await Promise.all([
     prisma.scenarioVariantCell.update({
       where: {
@@ -166,10 +177,9 @@ export const queueQueryModel = async (cellId: string, stream: boolean) => {
       data: {
         retrievalStatus: "PENDING",
         errorMessage: null,
+        jobQueuedAt: new Date(),
       },
     }),
-    await queryModel.enqueue({ cellId, stream }),
-    console.log("queued"),
+    queryModel.enqueue({ cellId, stream, numPreviousTries: 0 }),
   ]);
 };
17
src/server/tasks/runNewEval.task.ts
Normal file
17
src/server/tasks/runNewEval.task.ts
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
import { runAllEvals } from "../utils/evaluations";
|
||||||
|
import defineTask from "./defineTask";
|
||||||
|
|
||||||
|
export type RunNewEvalJob = {
|
||||||
|
experimentId: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
// When a new eval is created, we want to run it on all existing outputs, but return the new eval first
|
||||||
|
export const runNewEval = defineTask<RunNewEvalJob>("runNewEval", async (task) => {
|
||||||
|
console.log("RUNNING TASK", task);
|
||||||
|
const { experimentId } = task;
|
||||||
|
await runAllEvals(experimentId);
|
||||||
|
});
|
||||||
|
|
||||||
|
export const queueRunNewEval = async (experimentId: string) => {
|
||||||
|
await runNewEval.enqueue({ experimentId });
|
||||||
|
};
|
||||||
@@ -3,10 +3,11 @@ import "dotenv/config";

 import { env } from "~/env.mjs";
 import { queryModel } from "./queryModel.task";
+import { runNewEval } from "./runNewEval.task";

 console.log("Starting worker");

-const registeredTasks = [queryModel];
+const registeredTasks = [queryModel, runNewEval];

 const taskList = registeredTasks.reduce((acc, task) => {
   acc[task.task.identifier] = task.task.handler;
@@ -16,7 +17,7 @@ const taskList = registeredTasks.reduce((acc, task) => {
 // Run a worker to execute jobs:
 const runner = await run({
   connectionString: env.DATABASE_URL,
-  concurrency: 20,
+  concurrency: 50,
   // Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
   noHandleSignals: false,
   pollInterval: 1000,
@@ -1,19 +1,25 @@
-import { type ModelOutput, type Evaluation } from "@prisma/client";
+import { type ModelResponse, type Evaluation, Prisma } from "@prisma/client";
 import { prisma } from "../db";
 import { runOneEval } from "./runOneEval";
 import { type Scenario } from "~/components/OutputsTable/types";
+import { type SupportedProvider } from "~/modelProviders/types";

-const saveResult = async (evaluation: Evaluation, scenario: Scenario, modelOutput: ModelOutput) => {
-  const result = await runOneEval(evaluation, scenario, modelOutput);
+const runAndSaveEval = async (
+  evaluation: Evaluation,
+  scenario: Scenario,
+  modelResponse: ModelResponse,
+  provider: SupportedProvider,
+) => {
+  const result = await runOneEval(evaluation, scenario, modelResponse, provider);
   return await prisma.outputEvaluation.upsert({
     where: {
-      modelOutputId_evaluationId: {
-        modelOutputId: modelOutput.id,
+      modelResponseId_evaluationId: {
+        modelResponseId: modelResponse.id,
         evaluationId: evaluation.id,
       },
     },
     create: {
-      modelOutputId: modelOutput.id,
+      modelResponseId: modelResponse.id,
       evaluationId: evaluation.id,
       ...result,
     },
@@ -26,20 +32,28 @@ const saveResult = async (evaluation: Evaluation, scenario: Scenario, modelOutpu
 export const runEvalsForOutput = async (
   experimentId: string,
   scenario: Scenario,
-  modelOutput: ModelOutput,
+  modelResponse: ModelResponse,
+  provider: SupportedProvider,
 ) => {
   const evaluations = await prisma.evaluation.findMany({
     where: { experimentId },
   });

   await Promise.all(
-    evaluations.map(async (evaluation) => await saveResult(evaluation, scenario, modelOutput)),
+    evaluations.map(
+      async (evaluation) => await runAndSaveEval(evaluation, scenario, modelResponse, provider),
+    ),
   );
 };

+// Will not run eval-output pairs that already exist in the database
 export const runAllEvals = async (experimentId: string) => {
-  const outputs = await prisma.modelOutput.findMany({
+  const outputs = await prisma.modelResponse.findMany({
     where: {
+      outdated: false,
+      output: {
+        not: Prisma.AnyNull,
+      },
       scenarioVariantCell: {
         promptVariant: {
           experimentId,
@@ -54,6 +68,7 @@ export const runAllEvals = async (experimentId: string) => {
       scenarioVariantCell: {
         include: {
           testScenario: true,
+          promptVariant: true,
         },
       },
       outputEvaluations: true,
@@ -65,13 +80,18 @@ export const runAllEvals = async (experimentId: string) => {

   await Promise.all(
     outputs.map(async (output) => {
-      const unrunEvals = evals.filter(
+      const evalsToBeRun = evals.filter(
         (evaluation) => !output.outputEvaluations.find((e) => e.evaluationId === evaluation.id),
       );

       await Promise.all(
-        unrunEvals.map(async (evaluation) => {
-          await saveResult(evaluation, output.scenarioVariantCell.testScenario, output);
+        evalsToBeRun.map(async (evaluation) => {
+          await runAndSaveEval(
+            evaluation,
+            output.scenarioVariantCell.testScenario,
+            output,
+            output.scenarioVariantCell.promptVariant.modelProvider as SupportedProvider,
+          );
         }),
       );
     }),
@@ -1,4 +1,4 @@
-import { type Prisma } from "@prisma/client";
+import { Prisma } from "@prisma/client";
 import { prisma } from "../db";
 import parseConstructFn from "./parseConstructFn";
 import { type JsonObject } from "type-fest";
@@ -35,7 +35,7 @@ export const generateNewCell = async (
       },
     },
     include: {
-      modelOutput: true,
+      modelResponses: true,
     },
   });

@@ -51,8 +51,6 @@ export const generateNewCell = async (
       data: {
         promptVariantId: variantId,
         testScenarioId: scenarioId,
-        statusCode: 400,
-        errorMessage: parsedConstructFn.error,
         retrievalStatus: "ERROR",
       },
     });
@@ -69,36 +67,55 @@ export const generateNewCell = async (
       retrievalStatus: "PENDING",
     },
     include: {
-      modelOutput: true,
+      modelResponses: true,
     },
   });

-  const matchingModelOutput = await prisma.modelOutput.findFirst({
-    where: { inputHash },
+  const matchingModelResponse = await prisma.modelResponse.findFirst({
+    where: {
+      inputHash,
+      output: {
+        not: Prisma.AnyNull,
+      },
+    },
+    orderBy: {
+      receivedAt: "desc",
+    },
+    include: {
+      scenarioVariantCell: true,
+    },
+    take: 1,
   });

-  if (matchingModelOutput) {
-    const newModelOutput = await prisma.modelOutput.create({
+  if (matchingModelResponse) {
+    const newModelResponse = await prisma.modelResponse.create({
       data: {
-        ...omit(matchingModelOutput, ["id"]),
+        ...omit(matchingModelResponse, ["id", "scenarioVariantCell"]),
         scenarioVariantCellId: cell.id,
-        output: matchingModelOutput.output as Prisma.InputJsonValue,
+        output: matchingModelResponse.output as Prisma.InputJsonValue,
       },
     });

     await prisma.scenarioVariantCell.update({
       where: { id: cell.id },
-      data: { retrievalStatus: "COMPLETE" },
+      data: {
+        retrievalStatus: "COMPLETE",
+        jobStartedAt: matchingModelResponse.scenarioVariantCell.jobStartedAt,
+        jobQueuedAt: matchingModelResponse.scenarioVariantCell.jobQueuedAt,
+      },
     });

     // Copy over all eval results as well
     await Promise.all(
       (
-        await prisma.outputEvaluation.findMany({ where: { modelOutputId: matchingModelOutput.id } })
+        await prisma.outputEvaluation.findMany({
+          where: { modelResponseId: matchingModelResponse.id },
+        })
       ).map(async (evaluation) => {
         await prisma.outputEvaluation.create({
           data: {
             ...omit(evaluation, ["id"]),
-            modelOutputId: newModelOutput.id,
+            modelResponseId: newModelResponse.id,
           },
         });
       }),
@@ -2,4 +2,5 @@ import { env } from "~/env.mjs";

 import OpenAI from "openai";

-export const openai = new OpenAI({ apiKey: env.OPENAI_API_KEY });
+// Set a dummy key so it doesn't fail at build time
+export const openai = new OpenAI({ apiKey: env.OPENAI_API_KEY ?? "dummy-key" });
@@ -1,13 +1,14 @@
-import { type Evaluation, type ModelOutput, type TestScenario } from "@prisma/client";
-import { type ChatCompletion } from "openai/resources/chat";
+import { type Evaluation, type ModelResponse, type TestScenario } from "@prisma/client";
 import { type VariableMap, fillTemplate, escapeRegExp, escapeQuotes } from "./fillTemplate";
 import { openai } from "./openai";
 import dedent from "dedent";
+import modelProviders from "~/modelProviders/modelProviders";
+import { type SupportedProvider } from "~/modelProviders/types";

 export const runGpt4Eval = async (
   evaluation: Evaluation,
   scenario: TestScenario,
-  message: ChatCompletion.Choice.Message,
+  stringifiedOutput: string,
 ): Promise<{ result: number; details: string }> => {
   const output = await openai.chat.completions.create({
     model: "gpt-4-0613",
@@ -26,11 +27,7 @@ export const runGpt4Eval = async (
       },
       {
         role: "user",
-        content: `The full output of the simpler message:\n---\n${JSON.stringify(
-          message.content ?? message.function_call,
-          null,
-          2,
-        )}`,
+        content: `The full output of the simpler message:\n---\n${stringifiedOutput}`,
       },
     ],
     function_call: {
@@ -70,15 +67,16 @@ export const runGpt4Eval = async (
 export const runOneEval = async (
   evaluation: Evaluation,
   scenario: TestScenario,
-  modelOutput: ModelOutput,
+  modelResponse: ModelResponse,
+  provider: SupportedProvider,
 ): Promise<{ result: number; details?: string }> => {
-  const output = modelOutput.output as unknown as ChatCompletion;
-  const message = output?.choices?.[0]?.message;
+  const modelProvider = modelProviders[provider];
+  const message = modelProvider.normalizeOutput(modelResponse.output);

   if (!message) return { result: 0 };

-  const stringifiedMessage = message.content ?? JSON.stringify(message.function_call);
+  const stringifiedOutput =
+    message.type === "json" ? JSON.stringify(message.value, null, 2) : message.value;

   const matchRegex = escapeRegExp(
     fillTemplate(escapeQuotes(evaluation.value), scenario.variableValues as VariableMap),
@@ -86,10 +84,10 @@ export const runOneEval = async (

   switch (evaluation.evalType) {
     case "CONTAINS":
-      return { result: stringifiedMessage.match(matchRegex) !== null ? 1 : 0 };
+      return { result: stringifiedOutput.match(matchRegex) !== null ? 1 : 0 };
     case "DOES_NOT_CONTAIN":
-      return { result: stringifiedMessage.match(matchRegex) === null ? 1 : 0 };
+      return { result: stringifiedOutput.match(matchRegex) === null ? 1 : 0 };
     case "GPT4_EVAL":
-      return await runGpt4Eval(evaluation, scenario, message);
+      return await runGpt4Eval(evaluation, scenario, stringifiedOutput);
   }
 };
@@ -7,3 +7,5 @@ dayjs.extend(relativeTime);

 export const formatTimePast = (date: Date) =>
   dayjs.duration(dayjs(date).diff(dayjs())).humanize(true);
+
+export default dayjs;
@@ -107,4 +107,8 @@ export const useScenarios = () => {
   );
 };

+export const useScenario = (scenarioId: string) => {
+  return api.scenarios.get.useQuery({ id: scenarioId });
+};
+
 export const useVisibleScenarioIds = () => useScenarios().data?.scenarios.map((s) => s.id) ?? [];
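The new useScenario hook is a thin client-side wrapper around the scenarios.get procedure added earlier in this changeset. A hedged sketch of a consumer, where the component name and the rendered field are illustrative only:

    // Hypothetical component using the new hook.
    const ScenarioLabel = ({ scenarioId }: { scenarioId: string }) => {
      const scenario = useScenario(scenarioId);
      if (!scenario.data) return null; // query still loading or scenario not found
      return <span>{scenario.data.id}</span>;
    };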