Replace function chrome with comment

Use a block comment to explain the expected prompt formatting instead of the function chrome. The advantage here is that once a user builds a mental model of how OpenPipe works, they can just delete the comment, instead of the function chrome sitting around and taking up space in the UI forever.
This commit is contained in:
Kyle Corbitt
2023-07-17 10:30:22 -07:00
parent 64bd71e370
commit 8db8aeacd3
2 changed files with 22 additions and 18 deletions

View File

@@ -1,10 +1,9 @@
import { Box, Button, HStack, Tooltip, VStack, useToast } from "@chakra-ui/react";
import { Box, Button, HStack, Tooltip, useToast } from "@chakra-ui/react";
import { useRef, useEffect, useState, useCallback } from "react";
import { useHandledAsyncCallback, useModifierKeyLabel } from "~/utils/hooks";
import { type PromptVariant } from "./types";
import { api } from "~/utils/api";
import { useAppStore } from "~/state/store";
import { editorBackground } from "~/state/sharedVariantEditor.slice";
export default function VariantEditor(props: { variant: PromptVariant }) {
const monaco = useAppStore.use.sharedVariantEditor.monaco();
@@ -133,19 +132,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
return (
<Box w="100%" pos="relative">
<VStack
spacing={0}
align="stretch"
fontSize="xs"
fontWeight="bold"
color="gray.600"
py={2}
bgColor={editorBackground}
>
<code>{`function constructPrompt(scenario: Scenario): Prompt {`}</code>
<div id={editorId} style={{ height: "300px", width: "100%" }}></div>
<code>{`return prompt; }`}</code>
</VStack>
<div id={editorId} style={{ height: "400px", width: "100%" }}></div>
{isChanged && (
<HStack pos="absolute" bottom={2} right={2}>
<Button

View File

@@ -71,11 +71,28 @@ export const experimentsRouter = createTRPCRouter({
experimentId: exp.id,
label: "Prompt Variant 1",
sortIndex: 0,
constructFn: dedent`prompt = {
// The interpolated $ is necessary until dedent incorporates
// https://github.com/dmnd/dedent/pull/46
constructFn: dedent`
/**
* Use Javascript to define an OpenAI chat completion
* (https://platform.openai.com/docs/api-reference/chat/create) and
* assign it to the \`prompt\` variable.
*
* You have access to the current scenario in the \`scenario\`
* variable.
*/
prompt = {
model: "gpt-3.5-turbo-0613",
stream: true,
messages: [{ role: "system", content: ${"`Return '${scenario.text}'`"} }],
}`,
messages: [
{
role: "system",
content: \`"Return 'this is output for the scenario "${"$"}{scenario.text}"'\`,
},
],
};`,
model: "gpt-3.5-turbo-0613",
},
}),