Compare commits


10 Commits

Author SHA1 Message Date
Kyle Corbitt
0c37506975 Bugfix: show prompts for view-only experiments
Accidentally put the Monaco loading code in the wrong place! This fixes it.
2023-07-24 14:57:33 -07:00
arcticfly
2b2e0ab8ee Define refinement actions in the model providers (#87)
* Add descriptions of fields in llama 2 input schema

* Let GPT-4 know when the provider stays the same

* Allow refetching in the event of any errors

* Define refinement actions in model providers

* Fix prettier
2023-07-23 17:37:08 -07:00
arcticfly
3dbb06ec00 Give negative margin to account for border (#86) 2023-07-23 16:49:12 -07:00
David Corbitt
85d42a014b Fix padding on AddVariant button 2023-07-22 16:27:59 -07:00
arcticfly
7d1ded3b18 Improve menu styling (#85) 2023-07-22 16:22:00 -07:00
Kyle Corbitt
b00f6dd04b Merge pull request #84 from OpenPipe/paginated-scenarios
Paginate scenarios
2023-07-22 16:12:02 -07:00
Kyle Corbitt
2e395e4d39 Paginate scenarios
Show 10 scenarios at a time and let the user paginate through them to keep the interface responsive with potentially 1000s of scenarios.
2023-07-22 16:10:16 -07:00
Kyle Corbitt
4b06d05908 Merge pull request #82 from OpenPipe/space-out-scenarios
Separate scenarios from prompts in outputs table
2023-07-22 14:44:51 -07:00
Kyle Corbitt
aabf355b81 Merge pull request #81 from OpenPipe/fullscreen-editor
Fullscreen editor
2023-07-22 14:44:42 -07:00
Kyle Corbitt
cc1d1178da Fullscreen editor 2023-07-21 22:19:38 -07:00
28 changed files with 677 additions and 384 deletions

View File

@@ -1,6 +1,3 @@
 {
-  "eslint.format.enable": true,
-  "editor.codeActionsOnSave": {
-    "source.fixAll.eslint": true
-  }
+  "eslint.format.enable": true
 }

View File

@@ -59,6 +59,7 @@
"lodash-es": "^4.17.21", "lodash-es": "^4.17.21",
"next": "^13.4.2", "next": "^13.4.2",
"next-auth": "^4.22.1", "next-auth": "^4.22.1",
"next-query-params": "^4.2.3",
"nextjs-routes": "^2.0.1", "nextjs-routes": "^2.0.1",
"openai": "4.0.0-beta.2", "openai": "4.0.0-beta.2",
"pluralize": "^8.0.0", "pluralize": "^8.0.0",
@@ -79,6 +80,7 @@
"superjson": "1.12.2", "superjson": "1.12.2",
"tsx": "^3.12.7", "tsx": "^3.12.7",
"type-fest": "^4.0.0", "type-fest": "^4.0.0",
"use-query-params": "^2.2.1",
"vite-tsconfig-paths": "^4.2.0", "vite-tsconfig-paths": "^4.2.0",
"zod": "^3.21.4", "zod": "^3.21.4",
"zustand": "^4.3.9" "zustand": "^4.3.9"

pnpm-lock.yaml generated (41 changed lines)
View File

@@ -119,6 +119,9 @@ dependencies:
   next-auth:
     specifier: ^4.22.1
     version: 4.22.1(next@13.4.2)(react-dom@18.2.0)(react@18.2.0)
+  next-query-params:
+    specifier: ^4.2.3
+    version: 4.2.3(next@13.4.2)(react@18.2.0)(use-query-params@2.2.1)
   nextjs-routes:
     specifier: ^2.0.1
     version: 2.0.1(next@13.4.2)
@@ -179,6 +182,9 @@ dependencies:
   type-fest:
     specifier: ^4.0.0
     version: 4.0.0
+  use-query-params:
+    specifier: ^2.2.1
+    version: 2.2.1(react-dom@18.2.0)(react@18.2.0)
   vite-tsconfig-paths:
     specifier: ^4.2.0
     version: 4.2.0(typescript@5.0.4)
@@ -6037,6 +6043,19 @@ packages:
       uuid: 8.3.2
     dev: false
+  /next-query-params@4.2.3(next@13.4.2)(react@18.2.0)(use-query-params@2.2.1):
+    resolution: {integrity: sha512-hGNCYRH8YyA5ItiBGSKrtMl21b2MAqfPkdI1mvwloNVqSU142IaGzqHN+OTovyeLIpQfonY01y7BAHb/UH4POg==}
+    peerDependencies:
+      next: ^10.0.0 || ^11.0.0 || ^12.0.0 || ^13.0.0
+      react: ^16.8.0 || ^17.0.0 || ^18.0.0
+      use-query-params: ^2.0.0
+    dependencies:
+      next: 13.4.2(@babel/core@7.22.9)(react-dom@18.2.0)(react@18.2.0)
+      react: 18.2.0
+      tslib: 2.6.0
+      use-query-params: 2.2.1(react-dom@18.2.0)(react@18.2.0)
+    dev: false
   /next-tick@1.1.0:
     resolution: {integrity: sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==}
     dev: false
@@ -7147,6 +7166,10 @@ packages:
       randombytes: 2.1.0
     dev: true
+  /serialize-query-params@2.0.2:
+    resolution: {integrity: sha512-1chMo1dST4pFA9RDXAtF0Rbjaut4is7bzFbI1Z26IuMub68pNCILku85aYmeFhvnY//BXUPUhoRMjYcsT93J/Q==}
+    dev: false
   /serve-static@1.15.0:
     resolution: {integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==}
     engines: {node: '>= 0.8.0'}
@@ -7824,6 +7847,24 @@ packages:
       use-isomorphic-layout-effect: 1.1.2(@types/react@18.2.6)(react@18.2.0)
     dev: false
+  /use-query-params@2.2.1(react-dom@18.2.0)(react@18.2.0):
+    resolution: {integrity: sha512-i6alcyLB8w9i3ZK3caNftdb+UnbfBRNPDnc89CNQWkGRmDrm/gfydHvMBfVsQJRq3NoHOM2dt/ceBWG2397v1Q==}
+    peerDependencies:
+      '@reach/router': ^1.2.1
+      react: '>=16.8.0'
+      react-dom: '>=16.8.0'
+      react-router-dom: '>=5'
+    peerDependenciesMeta:
+      '@reach/router':
+        optional: true
+      react-router-dom:
+        optional: true
+    dependencies:
+      react: 18.2.0
+      react-dom: 18.2.0(react@18.2.0)
+      serialize-query-params: 2.0.2
+    dev: false
   /use-sidecar@1.1.2(@types/react@18.2.6)(react@18.2.0):
     resolution: {integrity: sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw==}
     engines: {node: '>=10'}

View File

@@ -7,9 +7,13 @@ const defaultId = "11111111-1111-1111-1111-111111111111";
 await prisma.organization.deleteMany({
   where: { id: defaultId },
 });
-await prisma.organization.create({
+// If there's an existing org, just seed into it
+const org =
+  (await prisma.organization.findFirst({})) ??
+  (await prisma.organization.create({
   data: { id: defaultId },
-});
+  }));
 await prisma.experiment.deleteMany({
   where: {
@@ -21,7 +25,7 @@ await prisma.experiment.create({
   data: {
     id: defaultId,
     label: "Country Capitals Example",
-    organizationId: defaultId,
+    organizationId: org.id,
   },
 });
@@ -103,30 +107,41 @@ await prisma.testScenario.deleteMany({
   },
 });
+const countries = [
+  "Afghanistan",
+  "Albania",
+  "Algeria",
+  "Andorra",
+  "Angola",
+  "Antigua and Barbuda",
+  "Argentina",
+  "Armenia",
+  "Australia",
+  "Austria",
+  "Austrian Empire",
+  "Azerbaijan",
+  "Baden",
+  "Bahamas, The",
+  "Bahrain",
+  "Bangladesh",
+  "Barbados",
+  "Bavaria",
+  "Belarus",
+  "Belgium",
+  "Belize",
+  "Benin (Dahomey)",
+  "Bolivia",
+  "Bosnia and Herzegovina",
+  "Botswana",
+];
 await prisma.testScenario.createMany({
-  data: [
-    {
-      experimentId: defaultId,
-      sortIndex: 0,
-      variableValues: {
-        country: "Spain",
-      },
-    },
-    {
-      experimentId: defaultId,
-      sortIndex: 1,
-      variableValues: {
-        country: "USA",
-      },
-    },
-    {
-      experimentId: defaultId,
-      sortIndex: 2,
-      variableValues: {
-        country: "Chile",
-      },
-    },
-  ],
+  data: countries.map((country, i) => ({
+    experimentId: defaultId,
+    sortIndex: i,
+    variableValues: {
+      country: country,
+    },
+  })),
 });
 const variants = await prisma.promptVariant.findMany({

View File

@@ -1,5 +1,6 @@
 import { Box, Flex, Icon, Spinner } from "@chakra-ui/react";
 import { BsPlus } from "react-icons/bs";
+import { Text } from "@chakra-ui/react";
 import { api } from "~/utils/api";
 import { useExperiment, useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
 import { cellPadding } from "../constants";
@@ -25,9 +26,10 @@ export default function AddVariantButton() {
<Flex w="100%" justifyContent="flex-end"> <Flex w="100%" justifyContent="flex-end">
<ActionButton <ActionButton
onClick={onClick} onClick={onClick}
py={5}
leftIcon={<Icon as={loading ? Spinner : BsPlus} boxSize={6} mr={loading ? 1 : 0} />} leftIcon={<Icon as={loading ? Spinner : BsPlus} boxSize={6} mr={loading ? 1 : 0} />}
> >
Add Variant <Text display={{ base: "none", md: "flex" }}>Add Variant</Text>
</ActionButton> </ActionButton>
{/* <Button {/* <Button
alignItems="center" alignItems="center"

View File

@@ -81,10 +81,21 @@ export default function OutputCell({
       </Center>
     );
-  if (!cell && !fetchingOutput) return <Text color="gray.500">Error retrieving output</Text>;
+  if (!cell && !fetchingOutput)
+    return (
+      <VStack>
+        <CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} />
+        <Text color="gray.500">Error retrieving output</Text>
+      </VStack>
+    );
   if (cell && cell.errorMessage) {
-    return <ErrorHandler cell={cell} refetchOutput={hardRefetch} />;
+    return (
+      <VStack>
+        <CellOptions refetchingOutput={hardRefetching} refetchOutput={hardRefetch} />
+        <ErrorHandler cell={cell} refetchOutput={hardRefetch} />
+      </VStack>
+    );
   }
   const normalizedOutput = modelOutput

View File

@@ -0,0 +1,74 @@
import { Box, HStack, IconButton } from "@chakra-ui/react";
import {
BsChevronDoubleLeft,
BsChevronDoubleRight,
BsChevronLeft,
BsChevronRight,
} from "react-icons/bs";
import { usePage, useScenarios } from "~/utils/hooks";
const ScenarioPaginator = () => {
const [page, setPage] = usePage();
const { data } = useScenarios();
if (!data) return null;
const { scenarios, startIndex, lastPage, count } = data;
const nextPage = () => {
if (page < lastPage) {
setPage(page + 1, "replace");
}
};
const prevPage = () => {
if (page > 1) {
setPage(page - 1, "replace");
}
};
const goToLastPage = () => setPage(lastPage, "replace");
const goToFirstPage = () => setPage(1, "replace");
return (
<HStack pt={4}>
<IconButton
variant="ghost"
size="sm"
onClick={goToFirstPage}
isDisabled={page === 1}
aria-label="Go to first page"
icon={<BsChevronDoubleLeft />}
/>
<IconButton
variant="ghost"
size="sm"
onClick={prevPage}
isDisabled={page === 1}
aria-label="Previous page"
icon={<BsChevronLeft />}
/>
<Box>
{startIndex}-{startIndex + scenarios.length - 1} / {count}
</Box>
<IconButton
variant="ghost"
size="sm"
onClick={nextPage}
isDisabled={page === lastPage}
aria-label="Next page"
icon={<BsChevronRight />}
/>
<IconButton
variant="ghost"
size="sm"
onClick={goToLastPage}
isDisabled={page === lastPage}
aria-label="Go to last page"
icon={<BsChevronDoubleRight />}
/>
</HStack>
);
};
export default ScenarioPaginator;

View File

@@ -12,7 +12,12 @@ import {
   Spinner,
 } from "@chakra-ui/react";
 import { cellPadding } from "../constants";
-import { useExperiment, useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
+import {
+  useExperiment,
+  useExperimentAccess,
+  useHandledAsyncCallback,
+  useScenarios,
+} from "~/utils/hooks";
 import { BsGear, BsPencil, BsPlus, BsStars } from "react-icons/bs";
 import { useAppStore } from "~/state/store";
 import { api } from "~/utils/api";
@@ -21,9 +26,10 @@ export const ActionButton = (props: ButtonProps) => (
<Button size="sm" variant="ghost" color="gray.600" {...props} /> <Button size="sm" variant="ghost" color="gray.600" {...props} />
); );
export const ScenariosHeader = (props: { numScenarios: number }) => { export const ScenariosHeader = () => {
const openDrawer = useAppStore((s) => s.openDrawer); const openDrawer = useAppStore((s) => s.openDrawer);
const { canModify } = useExperimentAccess(); const { canModify } = useExperimentAccess();
const scenarios = useScenarios();
const experiment = useExperiment(); const experiment = useExperiment();
const createScenarioMutation = api.scenarios.create.useMutation(); const createScenarioMutation = api.scenarios.create.useMutation();
@@ -44,7 +50,7 @@ export const ScenariosHeader = (props: { numScenarios: number }) => {
   return (
     <HStack w="100%" pb={cellPadding.y} pt={0} align="center" spacing={0}>
       <Text fontSize={16} fontWeight="bold">
-        Scenarios ({props.numScenarios})
+        Scenarios ({scenarios.data?.count})
       </Text>
       {canModify && (
         <Menu>
@@ -55,8 +61,11 @@ export const ScenariosHeader = (props: { numScenarios: number }) => {
               icon={<Icon as={loading ? Spinner : BsGear} />}
             />
           </MenuButton>
-          <MenuList fontSize="md">
-            <MenuItem icon={<Icon as={BsPlus} boxSize={6} />} onClick={() => onAddScenario(false)}>
+          <MenuList fontSize="md" zIndex="dropdown" mt={-3}>
+            <MenuItem
+              icon={<Icon as={BsPlus} boxSize={6} mx={-1} />}
+              onClick={() => onAddScenario(false)}
+            >
               Add Scenario
             </MenuItem>
             <MenuItem icon={<BsStars />} onClick={() => onAddScenario(true)}>

View File

@@ -1,17 +1,47 @@
-import { Box, Button, HStack, Spinner, Tooltip, useToast, Text } from "@chakra-ui/react";
+import {
+  Box,
+  Button,
+  HStack,
+  Spinner,
+  Tooltip,
+  useToast,
+  Text,
+  IconButton,
+} from "@chakra-ui/react";
 import { useRef, useEffect, useState, useCallback } from "react";
 import { useExperimentAccess, useHandledAsyncCallback, useModifierKeyLabel } from "~/utils/hooks";
 import { type PromptVariant } from "./types";
 import { api } from "~/utils/api";
 import { useAppStore } from "~/state/store";
+import { FiMaximize, FiMinimize } from "react-icons/fi";
+import { editorBackground } from "~/state/sharedVariantEditor.slice";
 export default function VariantEditor(props: { variant: PromptVariant }) {
   const { canModify } = useExperimentAccess();
   const monaco = useAppStore.use.sharedVariantEditor.monaco();
   const editorRef = useRef<ReturnType<NonNullable<typeof monaco>["editor"]["create"]> | null>(null);
+  const containerRef = useRef<HTMLDivElement | null>(null);
   const [editorId] = useState(() => `editor_${Math.random().toString(36).substring(7)}`);
   const [isChanged, setIsChanged] = useState(false);
+  const [isFullscreen, setIsFullscreen] = useState(false);
+  const toggleFullscreen = useCallback(() => {
+    setIsFullscreen((prev) => !prev);
+    editorRef.current?.focus();
+  }, [setIsFullscreen]);
+  useEffect(() => {
+    const handleEsc = (event: KeyboardEvent) => {
+      if (event.key === "Escape" && isFullscreen) {
+        toggleFullscreen();
+      }
+    };
+    window.addEventListener("keydown", handleEsc);
+    return () => window.removeEventListener("keydown", handleEsc);
+  }, [isFullscreen, toggleFullscreen]);
   const lastSavedFn = props.variant.constructFn;
   const modifierKey = useModifierKeyLabel();
@@ -99,11 +129,23 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
       readOnly: !canModify,
     });
-    editorRef.current.onDidFocusEditorText(() => {
-      // Workaround because otherwise the command only works on whatever
-      // editor was loaded on the page last.
-      // https://github.com/microsoft/monaco-editor/issues/2947#issuecomment-1422265201
-      editorRef.current?.addCommand(monaco.KeyMod.CtrlCmd | monaco.KeyCode.Enter, onSave);
+    // Workaround because otherwise the commands only work on whatever
+    // editor was loaded on the page last.
+    // https://github.com/microsoft/monaco-editor/issues/2947#issuecomment-1422265201
+    editorRef.current.onDidFocusEditorText(() => {
+      editorRef.current?.addCommand(monaco.KeyMod.CtrlCmd | monaco.KeyCode.KeyS, onSave);
+      editorRef.current?.addCommand(
+        monaco.KeyMod.CtrlCmd | monaco.KeyMod.Shift | monaco.KeyCode.KeyF,
+        toggleFullscreen,
+      );
+      // Exit fullscreen with escape
+      editorRef.current?.addCommand(monaco.KeyCode.Escape, () => {
+        if (isFullscreen) {
+          toggleFullscreen();
+        }
+      });
     });
     editorRef.current.onDidChangeModelContent(checkForChanges);
@@ -132,8 +174,40 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
   }, [canModify]);
   return (
-    <Box w="100%" pos="relative">
-      <div id={editorId} style={{ height: "400px", width: "100%" }}></div>
+    <Box
+      w="100%"
+      ref={containerRef}
+      sx={
+        isFullscreen
+          ? {
+              position: "fixed",
+              top: 0,
+              left: 0,
+              right: 0,
+              bottom: 0,
+            }
+          : { h: "400px", w: "100%" }
+      }
+      bgColor={editorBackground}
+      zIndex={isFullscreen ? 1000 : "unset"}
+      pos="relative"
+      _hover={{ ".fullscreen-toggle": { opacity: 1 } }}
+    >
+      <Box id={editorId} w="100%" h="100%" />
+      <Tooltip label={`${modifierKey} + ⇧ + F`}>
+        <IconButton
+          className="fullscreen-toggle"
+          aria-label="Minimize"
+          icon={isFullscreen ? <FiMinimize /> : <FiMaximize />}
+          position="absolute"
+          top={2}
+          right={2}
+          onClick={toggleFullscreen}
+          opacity={0}
+          transition="opacity 0.2s"
+        />
+      </Tooltip>
       {isChanged && (
         <HStack pos="absolute" bottom={2} right={2}>
           <Button
@@ -146,7 +220,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
           >
             Reset
           </Button>
-          <Tooltip label={`${modifierKey} + Enter`}>
+          <Tooltip label={`${modifierKey} + S`}>
             <Button size="sm" onClick={onSave} colorScheme="blue" w={16} disabled={saveInProgress}>
               {saveInProgress ? <Spinner boxSize={4} /> : <Text>Save</Text>}
             </Button>

View File

@@ -7,6 +7,9 @@ import VariantHeader from "../VariantHeader/VariantHeader";
 import VariantStats from "./VariantStats";
 import { ScenariosHeader } from "./ScenariosHeader";
 import { borders } from "./styles";
+import { useScenarios } from "~/utils/hooks";
+import ScenarioPaginator from "./ScenarioPaginator";
+import { Fragment } from "react";
 export default function OutputsTable({ experimentId }: { experimentId: string | undefined }) {
   const variants = api.promptVariants.list.useQuery(
@@ -14,17 +17,17 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
     { enabled: !!experimentId },
   );
-  const scenarios = api.scenarios.list.useQuery(
-    { experimentId: experimentId as string },
-    { enabled: !!experimentId },
-  );
+  const scenarios = useScenarios();
   if (!variants.data || !scenarios.data) return null;
   const allCols = variants.data.length + 2;
   const variantHeaderRows = 3;
   const scenarioHeaderRows = 1;
-  const allRows = variantHeaderRows + scenarioHeaderRows + scenarios.data.length;
+  const scenarioFooterRows = 1;
+  const visibleScenariosCount = scenarios.data.scenarios.length;
+  const allRows =
+    variantHeaderRows + scenarioHeaderRows + visibleScenariosCount + scenarioFooterRows;
   return (
     <Grid
@@ -49,11 +52,11 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
             ...borders,
             colStart: i + 2,
             borderLeftWidth: i === 0 ? 1 : 0,
+            marginLeft: i === 0 ? "-1px" : 0,
           };
           return (
-            <>
+            <Fragment key={variant.uiId}>
               <VariantHeader
-                key={variant.uiId}
                 variant={variant}
                 canHide={variants.data.length > 1}
                 rowStart={1}
@@ -65,7 +68,7 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
               <GridItem rowStart={3} {...sharedProps}>
                 <VariantStats variant={variant} />
               </GridItem>
-            </>
+            </Fragment>
           );
         })}
@@ -76,18 +79,25 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
         {...borders}
         borderRightWidth={0}
       >
-        <ScenariosHeader numScenarios={scenarios.data.length} />
+        <ScenariosHeader />
       </GridItem>
-      {scenarios.data.map((scenario, i) => (
+      {scenarios.data.scenarios.map((scenario, i) => (
         <ScenarioRow
           rowStart={i + variantHeaderRows + scenarioHeaderRows + 2}
           key={scenario.uiId}
           scenario={scenario}
           variants={variants.data}
-          canHide={scenarios.data.length > 1}
+          canHide={visibleScenariosCount > 1}
         />
       ))}
+      <GridItem
+        rowStart={variantHeaderRows + scenarioHeaderRows + visibleScenariosCount + 2}
+        colStart={1}
+        colSpan={allCols}
+      >
+        <ScenarioPaginator />
+      </GridItem>
       {/* Add some extra padding on the right, because when the table is too wide to fit in the viewport `pr` on the Grid isn't respected. */}
       <GridItem rowStart={1} colStart={allCols} rowSpan={allRows} w={4} borderBottomWidth={0} />

View File

@@ -2,4 +2,4 @@ import { type RouterOutputs } from "~/utils/api";
 export type PromptVariant = NonNullable<RouterOutputs["promptVariants"]["list"]>[0];
-export type Scenario = NonNullable<RouterOutputs["scenarios"]["list"]>[0];
+export type Scenario = NonNullable<RouterOutputs["scenarios"]["list"]>["scenarios"][0];

View File

@@ -1,7 +1,8 @@
 import { HStack, Icon, Heading, Text, VStack, GridItem } from "@chakra-ui/react";
 import { type IconType } from "react-icons";
+import { BsStars } from "react-icons/bs";
-export const RefineOption = ({
+export const RefineAction = ({
   label,
   icon,
   desciption,
@@ -10,7 +11,7 @@ export const RefineOption = ({
   loading,
 }: {
   label: string;
-  icon: IconType;
+  icon?: IconType;
   desciption: string;
   activeLabel: string | undefined;
   onClick: (label: string) => void;
@@ -44,7 +45,7 @@ export const RefineOption = ({
       opacity={loading ? 0.5 : 1}
     >
       <HStack cursor="pointer" spacing={6} fontSize="sm" fontWeight="medium" color="gray.500">
-        <Icon as={icon} boxSize={12} />
+        <Icon as={icon || BsStars} boxSize={12} />
         <Heading size="md" fontFamily="inconsolata, monospace">
           {label}
         </Heading>

View File

@@ -21,10 +21,10 @@ import { type PromptVariant } from "@prisma/client";
 import { useState } from "react";
 import CompareFunctions from "./CompareFunctions";
 import { CustomInstructionsInput } from "./CustomInstructionsInput";
-import { type RefineOptionInfo, refineOptions } from "./refineOptions";
-import { RefineOption } from "./RefineOption";
+import { RefineAction } from "./RefineAction";
 import { isObject, isString } from "lodash-es";
-import { type SupportedProvider } from "~/modelProviders/types";
+import { type RefinementAction, type SupportedProvider } from "~/modelProviders/types";
+import frontendModelProviders from "~/modelProviders/frontendModelProviders";
 export const RefinePromptModal = ({
   variant,
@@ -35,13 +35,14 @@ export const RefinePromptModal = ({
 }) => {
   const utils = api.useContext();
-  const providerRefineOptions = refineOptions[variant.modelProvider as SupportedProvider];
+  const refinementActions =
+    frontendModelProviders[variant.modelProvider as SupportedProvider].refinementActions || {};
   const { mutateAsync: getModifiedPromptMutateAsync, data: refinedPromptFn } =
     api.promptVariants.getModifiedPromptFn.useMutation();
   const [instructions, setInstructions] = useState<string>("");
-  const [activeRefineOptionLabel, setActiveRefineOptionLabel] = useState<string | undefined>(
+  const [activeRefineActionLabel, setActiveRefineActionLabel] = useState<string | undefined>(
     undefined,
   );
@@ -49,15 +50,15 @@ export const RefinePromptModal = ({
     async (label?: string) => {
       if (!variant.experimentId) return;
       const updatedInstructions = label
-        ? (providerRefineOptions[label] as RefineOptionInfo).instructions
+        ? (refinementActions[label] as RefinementAction).instructions
         : instructions;
-      setActiveRefineOptionLabel(label);
+      setActiveRefineActionLabel(label);
       await getModifiedPromptMutateAsync({
         id: variant.id,
         instructions: updatedInstructions,
       });
     },
-    [getModifiedPromptMutateAsync, onClose, variant, instructions, setActiveRefineOptionLabel],
+    [getModifiedPromptMutateAsync, onClose, variant, instructions, setActiveRefineActionLabel],
   );
   const replaceVariantMutation = api.promptVariants.replaceVariant.useMutation();
@@ -95,18 +96,18 @@ export const RefinePromptModal = ({
<ModalBody maxW="unset"> <ModalBody maxW="unset">
<VStack spacing={8}> <VStack spacing={8}>
<VStack spacing={4}> <VStack spacing={4}>
{Object.keys(providerRefineOptions).length && ( {Object.keys(refinementActions).length && (
<> <>
<SimpleGrid columns={{ base: 1, md: 2 }} spacing={8}> <SimpleGrid columns={{ base: 1, md: 2 }} spacing={8}>
{Object.keys(providerRefineOptions).map((label) => ( {Object.keys(refinementActions).map((label) => (
<RefineOption <RefineAction
key={label} key={label}
label={label} label={label}
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
icon={providerRefineOptions[label]!.icon} icon={refinementActions[label]!.icon}
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
desciption={providerRefineOptions[label]!.description} desciption={refinementActions[label]!.description}
activeLabel={activeRefineOptionLabel} activeLabel={activeRefineActionLabel}
onClick={getModifiedPromptFn} onClick={getModifiedPromptFn}
loading={modificationInProgress} loading={modificationInProgress}
/> />

View File

@@ -1,287 +0,0 @@
// Super hacky, but we'll redo the organization when we have more models
import { type SupportedProvider } from "~/modelProviders/types";
import { VscJson } from "react-icons/vsc";
import { TfiThought } from "react-icons/tfi";
import { type IconType } from "react-icons";
export type RefineOptionInfo = { icon: IconType; description: string; instructions: string };
export const refineOptions: Record<SupportedProvider, { [key: string]: RefineOptionInfo }> = {
"openai/ChatCompletion": {
"Add chain of thought": {
icon: VscJson,
description: "Asking the model to plan its answer can increase accuracy.",
instructions: `Adding chain of thought means asking the model to think about its answer before it gives it to you. This is useful for getting more accurate answers. Do not add an assistant message.
This is what a prompt looks like before adding chain of thought:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
content: \`Evaluate sentiment.\`,
},
{
role: "user",
content: \`This is the user's message: \${scenario.user_message}. Return "positive" or "negative" or "neutral"\`,
},
],
});
This is what one looks like after adding chain of thought:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
content: \`Evaluate sentiment.\`,
},
{
role: "user",
content: \`This is the user's message: \${scenario.user_message}. Return "positive" or "negative" or "neutral". Explain your answer before you give a score, then return the score on a new line.\`,
},
],
});
Here's another example:
Before:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
messages: [
{
role: "user",
content: \`Title: \${scenario.title}
Body: \${scenario.body}
Need: \${scenario.need}
Rate likelihood on 1-3 scale.\`,
},
],
temperature: 0,
functions: [
{
name: "score_post",
parameters: {
type: "object",
properties: {
score: {
type: "number",
},
},
},
},
],
function_call: {
name: "score_post",
},
});
After:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
messages: [
{
role: "user",
content: \`Title: \${scenario.title}
Body: \${scenario.body}
Need: \${scenario.need}
Rate likelihood on 1-3 scale. Provide an explanation, but always provide a score afterward.\`,
},
],
temperature: 0,
functions: [
{
name: "score_post",
parameters: {
type: "object",
properties: {
explanation: {
type: "string",
}
score: {
type: "number",
},
},
},
},
],
function_call: {
name: "score_post",
},
});
Add chain of thought to the original prompt.`,
},
"Convert to function call": {
icon: TfiThought,
description: "Use function calls to get output from the model in a more structured way.",
instructions: `OpenAI functions are a specialized way for an LLM to return output.
This is what a prompt looks like before adding a function:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
content: \`Evaluate sentiment.\`,
},
{
role: "user",
content: \`This is the user's message: \${scenario.user_message}. Return "positive" or "negative" or "neutral"\`,
},
],
});
This is what one looks like after adding a function:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
content: "Evaluate sentiment.",
},
{
role: "user",
content: scenario.user_message,
},
],
functions: [
{
name: "extract_sentiment",
parameters: {
type: "object", // parameters must always be an object with a properties key
properties: { // properties key is required
sentiment: {
type: "string",
description: "one of positive/negative/neutral",
},
},
},
},
],
function_call: {
name: "extract_sentiment",
},
});
Here's another example of adding a function:
Before:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
messages: [
{
role: "user",
content: \`Here is the title and body of a reddit post I am interested in:
title: \${scenario.title}
body: \${scenario.body}
On a scale from 1 to 3, how likely is it that the person writing this post has the following need? If you are not sure, make your best guess, or answer 1.
Need: \${scenario.need}
Answer one integer between 1 and 3.\`,
},
],
temperature: 0,
});
After:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
messages: [
{
role: "user",
content: \`Title: \${scenario.title}
Body: \${scenario.body}
Need: \${scenario.need}
Rate likelihood on 1-3 scale.\`,
},
],
temperature: 0,
functions: [
{
name: "score_post",
parameters: {
type: "object",
properties: {
score: {
type: "number",
},
},
},
},
],
function_call: {
name: "score_post",
},
});
Another example
Before:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
stream: true,
messages: [
{
role: "system",
content: \`Write 'Start experimenting!' in \${scenario.language}\`,
},
],
});
After:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
messages: [
{
role: "system",
content: \`Write 'Start experimenting!' in \${scenario.language}\`,
},
],
functions: [
{
name: "write_in_language",
parameters: {
type: "object",
properties: {
text: {
type: "string",
},
},
},
},
],
function_call: {
name: "write_in_language",
},
});
Add an OpenAI function that takes one or more nested parameters that match the expected output from this prompt.`,
},
},
"replicate/llama2": {},
};

View File

@@ -2,6 +2,7 @@ import { type JsonValue } from "type-fest";
import { type SupportedModel } from "."; import { type SupportedModel } from ".";
import { type FrontendModelProvider } from "../types"; import { type FrontendModelProvider } from "../types";
import { type ChatCompletion } from "openai/resources/chat"; import { type ChatCompletion } from "openai/resources/chat";
import { refinementActions } from "./refinementActions";
const frontendModelProvider: FrontendModelProvider<SupportedModel, ChatCompletion> = { const frontendModelProvider: FrontendModelProvider<SupportedModel, ChatCompletion> = {
name: "OpenAI ChatCompletion", name: "OpenAI ChatCompletion",
@@ -45,6 +46,8 @@ const frontendModelProvider: FrontendModelProvider<SupportedModel, ChatCompletio
     },
   },
+  refinementActions,
   normalizeOutput: (output) => {
     const message = output.choices[0]?.message;
     if (!message)

View File

@@ -0,0 +1,279 @@
import { TfiThought } from "react-icons/tfi";
import { type RefinementAction } from "../types";
import { VscJson } from "react-icons/vsc";
export const refinementActions: Record<string, RefinementAction> = {
"Add chain of thought": {
icon: VscJson,
description: "Asking the model to plan its answer can increase accuracy.",
instructions: `Adding chain of thought means asking the model to think about its answer before it gives it to you. This is useful for getting more accurate answers. Do not add an assistant message.
This is what a prompt looks like before adding chain of thought:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
content: \`Evaluate sentiment.\`,
},
{
role: "user",
content: \`This is the user's message: \${scenario.user_message}. Return "positive" or "negative" or "neutral"\`,
},
],
});
This is what one looks like after adding chain of thought:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
content: \`Evaluate sentiment.\`,
},
{
role: "user",
content: \`This is the user's message: \${scenario.user_message}. Return "positive" or "negative" or "neutral". Explain your answer before you give a score, then return the score on a new line.\`,
},
],
});
Here's another example:
Before:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
messages: [
{
role: "user",
content: \`Title: \${scenario.title}
Body: \${scenario.body}
Need: \${scenario.need}
Rate likelihood on 1-3 scale.\`,
},
],
temperature: 0,
functions: [
{
name: "score_post",
parameters: {
type: "object",
properties: {
score: {
type: "number",
},
},
},
},
],
function_call: {
name: "score_post",
},
});
After:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
messages: [
{
role: "user",
content: \`Title: \${scenario.title}
Body: \${scenario.body}
Need: \${scenario.need}
Rate likelihood on 1-3 scale. Provide an explanation, but always provide a score afterward.\`,
},
],
temperature: 0,
functions: [
{
name: "score_post",
parameters: {
type: "object",
properties: {
explanation: {
type: "string",
}
score: {
type: "number",
},
},
},
},
],
function_call: {
name: "score_post",
},
});
Add chain of thought to the original prompt.`,
},
"Convert to function call": {
icon: TfiThought,
description: "Use function calls to get output from the model in a more structured way.",
instructions: `OpenAI functions are a specialized way for an LLM to return output.
This is what a prompt looks like before adding a function:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
content: \`Evaluate sentiment.\`,
},
{
role: "user",
content: \`This is the user's message: \${scenario.user_message}. Return "positive" or "negative" or "neutral"\`,
},
],
});
This is what one looks like after adding a function:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
content: "Evaluate sentiment.",
},
{
role: "user",
content: scenario.user_message,
},
],
functions: [
{
name: "extract_sentiment",
parameters: {
type: "object", // parameters must always be an object with a properties key
properties: { // properties key is required
sentiment: {
type: "string",
description: "one of positive/negative/neutral",
},
},
},
},
],
function_call: {
name: "extract_sentiment",
},
});
Here's another example of adding a function:
Before:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
messages: [
{
role: "user",
content: \`Here is the title and body of a reddit post I am interested in:
title: \${scenario.title}
body: \${scenario.body}
On a scale from 1 to 3, how likely is it that the person writing this post has the following need? If you are not sure, make your best guess, or answer 1.
Need: \${scenario.need}
Answer one integer between 1 and 3.\`,
},
],
temperature: 0,
});
After:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
messages: [
{
role: "user",
content: \`Title: \${scenario.title}
Body: \${scenario.body}
Need: \${scenario.need}
Rate likelihood on 1-3 scale.\`,
},
],
temperature: 0,
functions: [
{
name: "score_post",
parameters: {
type: "object",
properties: {
score: {
type: "number",
},
},
},
},
],
function_call: {
name: "score_post",
},
});
Another example
Before:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
stream: true,
messages: [
{
role: "system",
content: \`Write 'Start experimenting!' in \${scenario.language}\`,
},
],
});
After:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
messages: [
{
role: "system",
content: \`Write 'Start experimenting!' in \${scenario.language}\`,
},
],
functions: [
{
name: "write_in_language",
parameters: {
type: "object",
properties: {
text: {
type: "string",
},
},
},
},
],
function_call: {
name: "write_in_language",
},
});
Add an OpenAI function that takes one or more nested parameters that match the expected output from this prompt.`,
},
};

View File

@@ -1,5 +1,6 @@
 import { type SupportedModel, type ReplicateLlama2Output } from ".";
 import { type FrontendModelProvider } from "../types";
+import { refinementActions } from "./refinementActions";
 const frontendModelProvider: FrontendModelProvider<SupportedModel, ReplicateLlama2Output> = {
   name: "Replicate Llama2",
@@ -31,6 +32,8 @@ const frontendModelProvider: FrontendModelProvider<SupportedModel, ReplicateLlam
     },
   },
+  refinementActions,
   normalizeOutput: (output) => {
     return {
       type: "text",

View File

@@ -27,8 +27,6 @@ export async function getCompletion(
     input: rest,
   });
-  console.log("stream?", onStream);
   const interval = onStream
     ? // eslint-disable-next-line @typescript-eslint/no-misused-promises
       setInterval(async () => {

View File

@@ -38,26 +38,42 @@ const modelProvider: ReplicateLlama2Provider = {
type: "string", type: "string",
enum: supportedModels as unknown as string[], enum: supportedModels as unknown as string[],
}, },
system_prompt: {
type: "string",
description:
"System prompt to send to Llama v2. This is prepended to the prompt and helps guide system behavior.",
},
prompt: { prompt: {
type: "string", type: "string",
description: "Prompt to send to Llama v2.",
}, },
stream: { stream: {
type: "boolean", type: "boolean",
description: "Whether to stream output from Llama v2.",
}, },
max_length: { max_new_tokens: {
type: "number", type: "number",
description:
"Maximum number of tokens to generate. A word is generally 2-3 tokens (minimum: 1)",
}, },
temperature: { temperature: {
type: "number", type: "number",
description:
"Adjusts randomness of outputs, greater than 1 is random and 0 is deterministic, 0.75 is a good starting value. (minimum: 0.01; maximum: 5)",
}, },
top_p: { top_p: {
type: "number", type: "number",
description:
"When decoding text, samples from the top p percentage of most likely tokens; lower to ignore less likely tokens (minimum: 0.01; maximum: 1)",
}, },
repetition_penalty: { repetition_penalty: {
type: "number", type: "number",
description:
"Penalty for repeated words in generated text; 1 is no penalty, values greater than 1 discourage repetition, less than 1 encourage it. (minimum: 0.01; maximum: 5)",
}, },
debug: { debug: {
type: "boolean", type: "boolean",
description: "provide debugging output in logs",
}, },
}, },
required: ["model", "prompt"], required: ["model", "prompt"],

View File

@@ -0,0 +1,3 @@
import { type RefinementAction } from "../types";
export const refinementActions: Record<string, RefinementAction> = {};

View File

@@ -1,4 +1,5 @@
 import { type JSONSchema4 } from "json-schema";
+import { type IconType } from "react-icons";
 import { type JsonValue } from "type-fest";
 import { z } from "zod";
@@ -23,9 +24,12 @@ export const ZodModel = z.object({
 export type Model = z.infer<typeof ZodModel>;
+export type RefinementAction = { icon?: IconType; description: string; instructions: string };
 export type FrontendModelProvider<SupportedModels extends string, OutputSchema> = {
   name: string;
   models: Record<SupportedModels, Model>;
+  refinementActions?: Record<string, RefinementAction>;
   normalizeOutput: (output: OutputSchema) => NormalizedOutput;
 };

View File

@@ -7,6 +7,8 @@ import "~/utils/analytics";
import Head from "next/head"; import Head from "next/head";
import { ChakraThemeProvider } from "~/theme/ChakraThemeProvider"; import { ChakraThemeProvider } from "~/theme/ChakraThemeProvider";
import { SyncAppStore } from "~/state/sync"; import { SyncAppStore } from "~/state/sync";
import NextAdapterApp from "next-query-params/app";
import { QueryParamProvider } from "use-query-params";
const MyApp: AppType<{ session: Session | null }> = ({ const MyApp: AppType<{ session: Session | null }> = ({
Component, Component,
@@ -24,7 +26,9 @@ const MyApp: AppType<{ session: Session | null }> = ({
       <SyncAppStore />
       <Favicon />
       <ChakraThemeProvider>
+        <QueryParamProvider adapter={NextAdapterApp}>
           <Component {...pageProps} />
+        </QueryParamProvider>
       </ChakraThemeProvider>
     </SessionProvider>
   </>

View File

@@ -49,10 +49,6 @@ const DeleteButton = () => {
     onClose();
   }, [mutation, experiment.data?.id, router]);
-  useEffect(() => {
-    useAppStore.getState().sharedVariantEditor.loadMonaco().catch(console.error);
-  });
   return (
     <>
       <Button
@@ -102,6 +98,10 @@ export default function Experiment() {
   const openDrawer = useAppStore((s) => s.openDrawer);
   useSyncVariantEditor();
+  useEffect(() => {
+    useAppStore.getState().sharedVariantEditor.loadMonaco().catch(console.error);
+  });
   const [label, setLabel] = useState(experiment.data?.label || "");
   useEffect(() => {
     setLabel(experiment.data?.label || "");

View File

@@ -7,21 +7,39 @@ import { runAllEvals } from "~/server/utils/evaluations";
 import { generateNewCell } from "~/server/utils/generateNewCell";
 import { requireCanModifyExperiment, requireCanViewExperiment } from "~/utils/accessControl";
+const PAGE_SIZE = 10;
 export const scenariosRouter = createTRPCRouter({
   list: publicProcedure
-    .input(z.object({ experimentId: z.string() }))
+    .input(z.object({ experimentId: z.string(), page: z.number() }))
     .query(async ({ input, ctx }) => {
       await requireCanViewExperiment(input.experimentId, ctx);
-      return await prisma.testScenario.findMany({
+      const { experimentId, page } = input;
+      const scenarios = await prisma.testScenario.findMany({
         where: {
-          experimentId: input.experimentId,
+          experimentId,
           visible: true,
         },
-        orderBy: {
-          sortIndex: "asc",
-        },
-      });
+        orderBy: { sortIndex: "asc" },
+        skip: (page - 1) * PAGE_SIZE,
+        take: PAGE_SIZE,
+      });
+      const count = await prisma.testScenario.count({
+        where: {
+          experimentId,
+          visible: true,
+        },
+      });
+      return {
+        scenarios,
+        startIndex: (page - 1) * PAGE_SIZE + 1,
+        lastPage: Math.ceil(count / PAGE_SIZE),
+        count,
+      };
     }),
   create: protectedProcedure

View File

@@ -74,6 +74,11 @@ const requestUpdatedPromptFunction = async (
           2,
         )}`,
       });
+    } else {
+      messages.push({
+        role: "user",
+        content: `The provider is the same as the old provider: ${originalModel.provider}`,
+      });
     }
   }
   if (instructions) {

View File

@@ -8,9 +8,9 @@ export const editorBackground = "#fafafa";
 export type SharedVariantEditorSlice = {
   monaco: null | ReturnType<typeof loader.__getMonacoInstance>;
   loadMonaco: () => Promise<void>;
-  scenarios: RouterOutputs["scenarios"]["list"];
+  scenarios: RouterOutputs["scenarios"]["list"]["scenarios"];
   updateScenariosModel: () => void;
-  setScenarios: (scenarios: RouterOutputs["scenarios"]["list"]) => void;
+  setScenarios: (scenarios: RouterOutputs["scenarios"]["list"]["scenarios"]) => void;
 };
 export const createVariantEditorSlice: SliceCreator<SharedVariantEditorSlice> = (set, get) => ({

View File

@@ -1,17 +1,14 @@
import { useEffect } from "react"; import { useEffect } from "react";
import { api } from "~/utils/api"; import { api } from "~/utils/api";
import { useExperiment } from "~/utils/hooks"; import { useScenarios } from "~/utils/hooks";
import { useAppStore } from "./store"; import { useAppStore } from "./store";
export function useSyncVariantEditor() { export function useSyncVariantEditor() {
const experiment = useExperiment(); const scenarios = useScenarios();
const scenarios = api.scenarios.list.useQuery(
{ experimentId: experiment.data?.id ?? "" },
{ enabled: !!experiment.data?.id },
);
useEffect(() => { useEffect(() => {
if (scenarios.data) { if (scenarios.data) {
useAppStore.getState().sharedVariantEditor.setScenarios(scenarios.data); useAppStore.getState().sharedVariantEditor.setScenarios(scenarios.data.scenarios);
} }
}, [scenarios.data]); }, [scenarios.data]);
} }

View File

@@ -1,6 +1,7 @@
import { useRouter } from "next/router"; import { useRouter } from "next/router";
import { type RefObject, useCallback, useEffect, useRef, useState } from "react"; import { type RefObject, useCallback, useEffect, useRef, useState } from "react";
import { api } from "~/utils/api"; import { api } from "~/utils/api";
import { NumberParam, useQueryParam, withDefault } from "use-query-params";
export const useExperiment = () => { export const useExperiment = () => {
const router = useRouter(); const router = useRouter();
@@ -93,3 +94,15 @@ export const useElementDimensions = (): [RefObject<HTMLElement>, Dimensions | un
   return [ref, dimensions];
 };
+export const usePage = () => useQueryParam("page", withDefault(NumberParam, 1));
+export const useScenarios = () => {
+  const experiment = useExperiment();
+  const [page] = usePage();
+  return api.scenarios.list.useQuery(
+    { experimentId: experiment.data?.id ?? "", page },
+    { enabled: experiment.data?.id != null },
+  );
+};