Add new predefined refinement options (#67)

* Add new predefined refinement options

* Fix prettier

* Add icon to SelectModelModal title
This commit is contained in:
arcticfly
2023-07-19 20:10:08 -07:00
committed by GitHub
parent 6e3f90cd2f
commit e598e454d0
9 changed files with 281 additions and 134 deletions

View File

@@ -42,8 +42,7 @@ export default function OutputCell({
{ refetchInterval }, { refetchInterval },
); );
const { mutateAsync: hardRefetchMutate } = const { mutateAsync: hardRefetchMutate } = api.scenarioVariantCells.forceRefetch.useMutation();
api.scenarioVariantCells.forceRefetch.useMutation();
const [hardRefetch, hardRefetching] = useHandledAsyncCallback(async () => { const [hardRefetch, hardRefetching] = useHandledAsyncCallback(async () => {
await hardRefetchMutate({ scenarioId: scenario.id, variantId: variant.id }); await hardRefetchMutate({ scenarioId: scenario.id, variantId: variant.id });
await utils.scenarioVariantCells.get.invalidate({ await utils.scenarioVariantCells.get.invalidate({

View File

@@ -35,7 +35,7 @@ const CompareFunctions = ({
return ( return (
<HStack w="full" spacing={5}> <HStack w="full" spacing={5}>
<VStack w="full" spacing={4} maxH="50vh" fontSize={12} lineHeight={1} overflowY="auto"> <VStack w="full" spacing={4} maxH="40vh" fontSize={12} lineHeight={1} overflowY="auto">
<DiffViewer <DiffViewer
oldValue={originalFunction} oldValue={originalFunction}
newValue={newFunction || originalFunction} newValue={newFunction || originalFunction}

View File

@@ -0,0 +1,75 @@
import { Button, Spinner, InputGroup, InputRightElement, Icon, HStack } from "@chakra-ui/react";
import { IoMdSend } from "react-icons/io";
import AutoResizeTextArea from "../AutoResizeTextArea";
/**
 * Free-form instruction input for prompt refinement: an auto-resizing text
 * area with a send button pinned to its right edge.
 *
 * @param instructions    Current instruction text (controlled value).
 * @param setInstructions Setter invoked on every keystroke.
 * @param loading         True while a refinement request is in flight; the
 *                        input and send button are disabled during this time.
 * @param onSubmit        Called when the user presses Enter or clicks send.
 */
export const CustomInstructionsInput = ({
  instructions,
  setInstructions,
  loading,
  onSubmit,
}: {
  instructions: string;
  setInstructions: (instructions: string) => void;
  loading: boolean;
  onSubmit: () => void;
}) => {
  return (
    <InputGroup
      size="md"
      w="full"
      maxW="600"
      boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);"
      borderRadius={8}
      alignItems="center"
      colorScheme="orange"
    >
      <AutoResizeTextArea
        value={instructions}
        onChange={(e) => setInstructions(e.target.value)}
        onKeyDown={(e) => {
          // Plain Enter submits; any modifier (Shift/Ctrl/Cmd) falls through
          // to the textarea's default behavior (newline insertion).
          if (e.key === "Enter" && !e.metaKey && !e.ctrlKey && !e.shiftKey) {
            e.preventDefault();
            e.currentTarget.blur();
            onSubmit();
          }
        }}
        placeholder="Send custom instructions"
        py={4}
        pl={4}
        pr={12}
        colorScheme="orange"
        borderColor="gray.300"
        borderWidth={1}
        _hover={{
          borderColor: "gray.300",
        }}
        _focus={{
          borderColor: "gray.300",
        }}
        isDisabled={loading}
      />
      <InputRightElement width="8" height="full">
        <Button
          h="8"
          w="8"
          minW="unset"
          size="sm"
          onClick={() => onSubmit()}
          // Use Chakra's isDisabled (not the raw `disabled` attribute) so
          // disabled styling/focus handling applies; also block re-submits
          // while a request is already loading, matching the textarea above.
          isDisabled={!instructions || loading}
          variant={instructions ? "solid" : "ghost"}
          mr={4}
          borderRadius="8"
          bgColor={instructions ? "orange.400" : "transparent"}
          colorScheme="orange"
        >
          {loading ? (
            <Spinner boxSize={4} />
          ) : (
            <Icon as={IoMdSend} color={instructions ? "white" : "gray.500"} boxSize={5} />
          )}
        </Button>
      </InputRightElement>
    </InputGroup>
  );
};

View File

@@ -0,0 +1,64 @@
import { HStack, Icon, Heading, Text, VStack, GridItem } from "@chakra-ui/react";
import { type IconType } from "react-icons";
import { refineOptions, type RefineOptionLabel } from "./refineOptions";
/**
 * Clickable card for one predefined refinement option, showing an icon, the
 * option label as a heading, and its description from `refineOptions`.
 *
 * @param label       Which predefined option this card represents.
 * @param activeLabel The option currently being applied (if any); used to
 *                    highlight this card's border when it matches `label`.
 * @param icon        Icon rendered next to the heading.
 * @param onClick     Invoked with `label` when the card is clicked.
 * @param loading     True while a refinement request is in flight; clicks are
 *                    ignored and the card is dimmed.
 */
export const RefineOption = ({
  label,
  activeLabel,
  icon,
  onClick,
  loading,
}: {
  label: RefineOptionLabel;
  activeLabel: RefineOptionLabel | undefined;
  icon: IconType;
  onClick: (label: RefineOptionLabel) => void;
  loading: boolean;
}) => {
  const isActive = activeLabel === label;
  // Fix: local was previously misspelled "desciption".
  const description = refineOptions[label].description;
  return (
    <GridItem w="80" h="44">
      <VStack
        w="full"
        h="full"
        onClick={() => {
          // Ignore clicks while a request is already in flight.
          if (!loading) {
            onClick(label);
          }
        }}
        borderColor={isActive ? "blue.500" : "gray.200"}
        borderWidth={2}
        borderRadius={16}
        padding={6}
        backgroundColor="gray.50"
        _hover={
          loading
            ? undefined
            : {
                backgroundColor: "gray.100",
              }
        }
        spacing={8}
        boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);"
        cursor="pointer"
        opacity={loading ? 0.5 : 1}
      >
        <HStack cursor="pointer" spacing={6} fontSize="sm" fontWeight="medium" color="gray.500">
          <Icon as={icon} boxSize={12} />
          <Heading size="md" fontFamily="inconsolata, monospace">
            {label}
          </Heading>
        </HStack>
        <Text
          fontSize="sm"
          color="gray.500"
          flexWrap="wrap"
          wordBreak="break-word"
          overflowWrap="break-word"
        >
          {description}
        </Text>
      </VStack>
    </GridItem>
  );
};

View File

@@ -11,17 +11,20 @@ import {
Text, Text,
Spinner, Spinner,
HStack, HStack,
InputGroup,
InputRightElement,
Icon, Icon,
SimpleGrid,
} from "@chakra-ui/react"; } from "@chakra-ui/react";
import { IoMdSend } from "react-icons/io"; import { BsStars } from "react-icons/bs";
import { VscJson } from "react-icons/vsc";
import { TfiThought } from "react-icons/tfi";
import { api } from "~/utils/api"; import { api } from "~/utils/api";
import { useHandledAsyncCallback } from "~/utils/hooks"; import { useHandledAsyncCallback } from "~/utils/hooks";
import { type PromptVariant } from "@prisma/client"; import { type PromptVariant } from "@prisma/client";
import { useState } from "react"; import { useState } from "react";
import CompareFunctions from "./CompareFunctions"; import CompareFunctions from "./CompareFunctions";
import AutoResizeTextArea from "../AutoResizeTextArea"; import { CustomInstructionsInput } from "./CustomInstructionsInput";
import { type RefineOptionLabel, refineOptions } from "./refineOptions";
import { RefineOption } from "./RefineOption";
export const RefinePromptModal = ({ export const RefinePromptModal = ({
variant, variant,
@@ -36,13 +39,22 @@ export const RefinePromptModal = ({
api.promptVariants.getRefinedPromptFn.useMutation(); api.promptVariants.getRefinedPromptFn.useMutation();
const [instructions, setInstructions] = useState<string>(""); const [instructions, setInstructions] = useState<string>("");
const [getRefinedPromptFn, refiningInProgress] = useHandledAsyncCallback(async () => { const [activeRefineOptionLabel, setActiveRefineOptionLabel] = useState<
if (!variant.experimentId) return; RefineOptionLabel | undefined
await getRefinedPromptMutateAsync({ >(undefined);
id: variant.id,
instructions, const [getRefinedPromptFn, refiningInProgress] = useHandledAsyncCallback(
}); async (label?: RefineOptionLabel) => {
}, [getRefinedPromptMutateAsync, onClose, variant, instructions]); if (!variant.experimentId) return;
const updatedInstructions = label ? refineOptions[label].instructions : instructions;
setActiveRefineOptionLabel(label);
await getRefinedPromptMutateAsync({
id: variant.id,
instructions: updatedInstructions,
});
},
[getRefinedPromptMutateAsync, onClose, variant, instructions, setActiveRefineOptionLabel],
);
const replaceVariantMutation = api.promptVariants.replaceVariant.useMutation(); const replaceVariantMutation = api.promptVariants.replaceVariant.useMutation();
@@ -60,65 +72,42 @@ export const RefinePromptModal = ({
<Modal isOpen onClose={onClose} size={{ base: "xl", sm: "2xl", md: "7xl" }}> <Modal isOpen onClose={onClose} size={{ base: "xl", sm: "2xl", md: "7xl" }}>
<ModalOverlay /> <ModalOverlay />
<ModalContent w={1200}> <ModalContent w={1200}>
<ModalHeader>Refine with GPT-4</ModalHeader> <ModalHeader>
<HStack>
<Icon as={BsStars} />
<Text>Refine with GPT-4</Text>
</HStack>
</ModalHeader>
<ModalCloseButton /> <ModalCloseButton />
<ModalBody maxW="unset"> <ModalBody maxW="unset">
<VStack spacing={16} pt={8}> <VStack spacing={8}>
<InputGroup <VStack spacing={4}>
size="md" <SimpleGrid columns={{ base: 1, md: 2 }} spacing={8}>
w="full" <RefineOption
maxW="600" label="Convert to function call"
boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);" activeLabel={activeRefineOptionLabel}
borderRadius={8} icon={VscJson}
alignItems="center"
colorScheme="orange"
>
<AutoResizeTextArea
value={instructions}
onChange={(e) => setInstructions(e.target.value)}
onKeyDown={(e) => {
if (e.key === "Enter" && !e.metaKey && !e.ctrlKey && !e.shiftKey) {
e.preventDefault();
e.currentTarget.blur();
getRefinedPromptFn();
}
}}
placeholder="Send instructions"
py={4}
pl={4}
pr={12}
colorScheme="orange"
borderColor="gray.300"
borderWidth={1}
_hover={{
borderColor: "gray.300",
}}
_focus={{
borderColor: "gray.300",
}}
/>
<InputRightElement width="8" height="full">
<Button
h="8"
w="8"
minW="unset"
size="sm"
onClick={getRefinedPromptFn} onClick={getRefinedPromptFn}
disabled={!instructions} loading={refiningInProgress}
variant={instructions ? "solid" : "ghost"} />
mr={4} <RefineOption
borderRadius="8" label="Add chain of thought"
bgColor={instructions ? "orange.400" : "transparent"} activeLabel={activeRefineOptionLabel}
colorScheme="orange" icon={TfiThought}
> onClick={getRefinedPromptFn}
{refiningInProgress ? ( loading={refiningInProgress}
<Spinner boxSize={4} /> />
) : ( </SimpleGrid>
<Icon as={IoMdSend} color={instructions ? "white" : "gray.500"} boxSize={5} /> <HStack>
)} <Text color="gray.500">or</Text>
</Button> </HStack>
</InputRightElement> <CustomInstructionsInput
</InputGroup> instructions={instructions}
setInstructions={setInstructions}
loading={refiningInProgress}
onSubmit={getRefinedPromptFn}
/>
</VStack>
<CompareFunctions <CompareFunctions
originalFunction={variant.constructFn} originalFunction={variant.constructFn}
newFunction={refinedPromptFn} newFunction={refinedPromptFn}
@@ -127,11 +116,11 @@ export const RefinePromptModal = ({
</ModalBody> </ModalBody>
<ModalFooter> <ModalFooter>
<HStack spacing={4} pt={8}> <HStack spacing={4}>
<Button <Button
onClick={replaceVariant} onClick={replaceVariant}
minW={24} minW={24}
disabled={true} disabled={replacementInProgress || !refinedPromptFn}
_disabled={{ _disabled={{
bgColor: "blue.500", bgColor: "blue.500",
}} }}

View File

@@ -0,0 +1,70 @@
// Predefined prompt-refinement options shown in RefinePromptModal.
// Each option carries a short user-facing `description` (rendered on the
// RefineOption card) and the `instructions` string sent verbatim to GPT-4
// as the refinement request. The instruction strings are prompt text —
// do not reformat or "fix" their contents.
// Super hacky, but we'll redo the organization when we have more models
export type RefineOptionLabel = "Add chain of thought" | "Convert to function call";
export const refineOptions: Record<
  RefineOptionLabel,
  { description: string; instructions: string }
> = {
  // Asks the model to reason step-by-step before answering.
  "Add chain of thought": {
    description: "Asks the model to think about its answer before it gives it to you.",
    instructions: `Adding chain of thought means asking the model to think about its answer before it gives it to you. This is useful for getting more accurate answers. Do not add an assistant message.
Add chain of thought to the original prompt.`,
  },
  // Rewrites the prompt to use the OpenAI function-calling format; the
  // instructions embed a before/after example (note: \${...} inside the
  // template literal is escaped so it reaches the model as literal text).
  "Convert to function call": {
    description: "Converts the prompt to a function call.",
    instructions: `Function calls are a specific way for an LLM to return output. This is what a prompt looks like without using function calls:
prompt = {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
content: \`Evaluate sentiment.\`,
},
{
role: "user",
content: \`This is the user's message: \${scenario.user_message}. Return "positive" or "negative" or "neutral"\`,
},
],
};
This is what one looks like using function calls:
prompt = {
model: "gpt-3.5-turbo-0613",
stream: true,
messages: [
{
role: "system",
content: "Evaluate sentiment.",
},
{
role: "user",
content: scenario.user_message,
},
],
functions: [
{
name: "extract_sentiment",
parameters: {
type: "object", // parameters must always be an object with a properties key
properties: { // properties key is required
sentiment: {
type: "string",
description: "one of positive/negative/neutral",
},
},
},
},
],
function_call: {
name: "extract_sentiment",
},
};
Add a function call that takes one or more nested parameters.`,
  },
};

View File

@@ -10,7 +10,10 @@ import {
VStack, VStack,
Text, Text,
Spinner, Spinner,
HStack,
Icon,
} from "@chakra-ui/react"; } from "@chakra-ui/react";
import { RiExchangeFundsFill } from "react-icons/ri";
import { useState } from "react"; import { useState } from "react";
import { type SupportedModel } from "~/server/types"; import { type SupportedModel } from "~/server/types";
import { ModelStatsCard } from "./ModelStatsCard"; import { ModelStatsCard } from "./ModelStatsCard";
@@ -49,7 +52,12 @@ export const SelectModelModal = ({
<Modal isOpen onClose={onClose} size={{ base: "xl", sm: "2xl", md: "3xl" }}> <Modal isOpen onClose={onClose} size={{ base: "xl", sm: "2xl", md: "3xl" }}>
<ModalOverlay /> <ModalOverlay />
<ModalContent w={1200}> <ModalContent w={1200}>
<ModalHeader>Select a New Model</ModalHeader> <ModalHeader>
<HStack>
<Icon as={RiExchangeFundsFill} />
<Text>Change Model</Text>
</HStack>
</ModalHeader>
<ModalCloseButton /> <ModalCloseButton />
<ModalBody maxW="unset"> <ModalBody maxW="unset">
<VStack spacing={8}> <VStack spacing={8}>

View File

@@ -12,9 +12,8 @@ import {
Text, Text,
Spinner, Spinner,
} from "@chakra-ui/react"; } from "@chakra-ui/react";
import { BsFillTrashFill, BsGear } from "react-icons/bs"; import { BsFillTrashFill, BsGear, BsStars } from "react-icons/bs";
import { FaRegClone } from "react-icons/fa"; import { FaRegClone } from "react-icons/fa";
import { AiOutlineDiff } from "react-icons/ai";
import { useState } from "react"; import { useState } from "react";
import { RefinePromptModal } from "../RefinePromptModal/RefinePromptModal"; import { RefinePromptModal } from "../RefinePromptModal/RefinePromptModal";
import { RiExchangeFundsFill } from "react-icons/ri"; import { RiExchangeFundsFill } from "react-icons/ri";
@@ -79,7 +78,7 @@ export default function VariantHeaderMenuButton({
Change Model Change Model
</MenuItem> </MenuItem>
<MenuItem <MenuItem
icon={<Icon as={AiOutlineDiff} boxSize={5} />} icon={<Icon as={BsStars} boxSize={5} />}
onClick={() => setRefinePromptModalOpen(true)} onClick={() => setRefinePromptModalOpen(true)}
> >
Refine Refine

View File

@@ -40,59 +40,6 @@ const requestUpdatedPromptFunction = async (
) => { ) => {
const originalModel = originalVariant.model as SupportedModel; const originalModel = originalVariant.model as SupportedModel;
let newContructionFn = ""; let newContructionFn = "";
const usefulTips = `Adding chain of thought means asking the model to think about its answer before it gives it to you. This is useful for getting more accurate answers. Do not add an assistant message.
Function calls are a specific way for an LLM to return output. This is what a prompt looks like without using function calls:
prompt = {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
content: \`Evaluate sentiment.\`,
},
{
role: "user",
content: \`This is the user's message: \${scenario.user_message}. Return "positive" or "negative" or "neutral"\`,
},
],
};
This is what one looks like using function calls:
prompt = {
model: "gpt-3.5-turbo-0613",
stream: true,
messages: [
{
role: "system",
content: "Evaluate sentiment.",
},
{
role: "user",
content: scenario.user_message,
},
],
functions: [
{
name: "extract_sentiment",
parameters: {
type: "object", // parameters must always be an object with a properties key
properties: { // properties key is required
sentiment: {
type: "string",
description: "one of positive/negative/neutral",
},
},
},
},
],
function_call: {
name: "extract_sentiment",
},
};
`;
for (let i = 0; i < NUM_RETRIES; i++) { for (let i = 0; i < NUM_RETRIES; i++) {
try { try {
const messages: CompletionCreateParams.CreateChatCompletionRequestNonStreaming.Message[] = [ const messages: CompletionCreateParams.CreateChatCompletionRequestNonStreaming.Message[] = [
@@ -112,13 +59,9 @@ const requestUpdatedPromptFunction = async (
}); });
} }
if (instructions) { if (instructions) {
messages.push({
role: "system",
content: `Here is some useful information about prompt engineering: ${usefulTips}`,
});
messages.push({ messages.push({
role: "user", role: "user",
content: `Follow these instructions: ${instructions}`, content: instructions,
}); });
} }
messages.push({ messages.push({