Save and display timeToComplete on model outputs (#6)

* Calculate and save timeToComplete on model outputs

* Add output stats to function call as well

* Record timeToComplete before parsing response json

* Add default value for timeToComplete
This commit is contained in:
arcticfly
2023-06-30 21:29:28 -07:00
committed by GitHub
parent 625d34a9e0
commit 6389bd54de
5 changed files with 44 additions and 18 deletions

View File

@@ -0,0 +1,2 @@
-- AlterTable
-- Add timeToComplete (elapsed request time in milliseconds) to ModelOutput.
-- DEFAULT 0 lets the NOT NULL constraint succeed on pre-existing rows without a backfill.
ALTER TABLE "ModelOutput" ADD COLUMN "timeToComplete" INTEGER NOT NULL DEFAULT 0;

View File

@@ -80,6 +80,7 @@ model ModelOutput {
output Json
statusCode Int
errorMessage String?
timeToComplete Int @default(0)
promptVariantId String @db.Uuid
promptVariant PromptVariant @relation(fields: [promptVariantId], references: [id], onDelete: Cascade)

View File

@@ -1,12 +1,14 @@
import { api } from "~/utils/api";
import { type PromptVariant, type Scenario } from "./types";
import { Spinner, Text, Box, Center } from "@chakra-ui/react";
import { Spinner, Text, Box, Center, Flex, Icon } from "@chakra-ui/react";
import { useExperiment } from "~/utils/hooks";
import { type CreateChatCompletionResponse } from "openai";
import SyntaxHighlighter from "react-syntax-highlighter";
import { docco } from "react-syntax-highlighter/dist/cjs/styles/hljs";
import stringify from "json-stringify-pretty-compact";
import { type ReactElement } from "react";
import { BsClock } from "react-icons/bs";
import { type ModelOutput } from "@prisma/client";
export default function OutputCell({
scenario,
@@ -22,13 +24,11 @@ export default function OutputCell({
const scenarioVariables = scenario.variableValues as Record<string, string>;
const templateHasVariables =
vars?.length === 0 ||
vars?.some((v) => scenarioVariables[v.label] !== undefined);
vars?.length === 0 || vars?.some((v) => scenarioVariables[v.label] !== undefined);
let disabledReason: string | null = null;
if (!templateHasVariables)
disabledReason = "Add a value to the scenario variables to see output";
if (!templateHasVariables) disabledReason = "Add a value to the scenario variables to see output";
if (variant.config === null || Object.keys(variant.config).length === 0)
disabledReason = "Save your prompt variant to see output";
@@ -45,17 +45,20 @@ export default function OutputCell({
if (disabledReason) return <Text color="gray.500">{disabledReason}</Text>;
if (output.isLoading) return <Center h="100%" w="100%"><Spinner /></Center>;
if (output.isLoading)
return (
<Center h="100%" w="100%">
<Spinner />
</Center>
);
if (!output.data)
return <Text color="gray.500">Error retrieving output</Text>;
if (!output.data) return <Text color="gray.500">Error retrieving output</Text>;
if (output.data.errorMessage) {
return <Text color="red.600">Error: {output.data.errorMessage}</Text>;
}
const response = output.data
?.output as unknown as CreateChatCompletionResponse;
const response = output.data?.output as unknown as CreateChatCompletionResponse;
const message = response?.choices?.[0]?.message;
if (message?.function_call) {
@@ -64,9 +67,7 @@ export default function OutputCell({
try {
parsedArgs = JSON.parse(rawArgs);
} catch (e: any) {
parsedArgs = `Failed to parse arguments as JSON: '${rawArgs}' ERROR: ${
e.message as string
}`;
parsedArgs = `Failed to parse arguments as JSON: '${rawArgs}' ERROR: ${e.message as string}`;
}
return (
@@ -88,9 +89,26 @@ export default function OutputCell({
{ maxLength: 40 }
)}
</SyntaxHighlighter>
<OutputStats modelOutput={output.data} />
</Box>
);
}
return <Box whiteSpace="pre-wrap">{message?.content ?? JSON.stringify(output.data.output)}</Box>;
return (
<Flex w="100%" h="100%" direction="column" justifyContent="space-between" whiteSpace="pre-wrap">
{message?.content ?? JSON.stringify(output.data.output)}
<OutputStats modelOutput={output.data} />
</Flex>
);
}
// Right-aligned footer showing how long the model output took to generate
// (clock icon followed by seconds with two decimal places).
const OutputStats = ({ modelOutput }: { modelOutput: ModelOutput }) => {
  // timeToComplete is stored in milliseconds; render as fractional seconds.
  const elapsedSeconds = (modelOutput.timeToComplete / 1000).toFixed(2);
  return (
    <Flex justifyContent="flex-end" alignItems="center" color="gray.500" fontSize="xs" mt={2}>
      <Icon as={BsClock} mr={0.5} />
      <Text>{elapsedSeconds}s</Text>
    </Flex>
  );
};

View File

@@ -61,6 +61,7 @@ export const modelOutputsRouter = createTRPCRouter({
output: existingResponse.output as Prisma.InputJsonValue,
statusCode: existingResponse.statusCode,
errorMessage: existingResponse.errorMessage,
timeToComplete: existingResponse.timeToComplete,
};
} else {
modelResponse = await getChatCompletion(filledTemplate, env.OPENAI_API_KEY);

View File

@@ -6,12 +6,14 @@ type CompletionResponse = {
output: Prisma.InputJsonValue | typeof Prisma.JsonNull;
statusCode: number;
errorMessage: string | null;
timeToComplete: number
};
export async function getChatCompletion(
payload: JSONSerializable,
apiKey: string
): Promise<CompletionResponse> {
const start = Date.now();
const response = await fetch("https://api.openai.com/v1/chat/completions", {
method: "POST",
headers: {
@@ -25,9 +27,11 @@ export async function getChatCompletion(
output: Prisma.JsonNull,
errorMessage: null,
statusCode: response.status,
timeToComplete: 0
};
try {
resp.timeToComplete = Date.now() - start;
resp.output = await response.json();
if (!response.ok) {