Minor edits (#196)
* Add command to delete last fine tune
* Change ids in seeded dashboard
* Change fine tune icon
app/prisma/deleteOneFineTune.ts (new file, 12 lines added)
@@ -0,0 +1,12 @@
+import { prisma } from "~/server/db";
+
+// delete most recent fineTune
+const mostRecentFineTune = await prisma.fineTune.findFirst({
+  orderBy: { createdAt: "desc" },
+});
+
+if (mostRecentFineTune) {
+  await prisma.fineTune.delete({
+    where: { id: mostRecentFineTune.id },
+  });
+}
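The committed script reuses the app's shared `prisma` instance from `~/server/db` and relies on top-level await. For reference, a minimal standalone sketch of the same cleanup, assuming only a generated Prisma Client with the `fineTune` model shown above (the explicit client and `$disconnect` call are additions for running it outside the app):

// sketch: delete the most recently created fineTune, then disconnect cleanly
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

async function deleteLastFineTune() {
  const mostRecentFineTune = await prisma.fineTune.findFirst({
    orderBy: { createdAt: "desc" },
  });
  if (mostRecentFineTune) {
    await prisma.fineTune.delete({ where: { id: mostRecentFineTune.id } });
  }
}

deleteLastFineTune()
  .catch(console.error)
  .finally(() => prisma.$disconnect());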
@@ -80,7 +80,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     },
     respStatus: 200,
     respPayload: {
-      id: "chatcmpl-7lNspqePJWVyXwXebupxb1eMozo6Q",
+      id: "chatcmpl-7",
       model: "gpt-3.5-turbo-0613",
       usage: {
         total_tokens: 241,
@@ -108,7 +108,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     inputTokens: 236,
     outputTokens: 5,
     finishReason: "stop",
-    tags: [],
+    tags: [{ name: "prompt_id", value: "define_func" }],
   },
   {
     reqPayload: {
@@ -167,7 +167,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     },
     respStatus: 200,
     respPayload: {
-      id: "chatcmpl-7lNifmc5AncyAvleZRDBhAcLFYBIT",
+      id: "chatcmpl-7",
       model: "gpt-3.5-turbo-0613",
       usage: {
         total_tokens: 227,
@@ -210,7 +210,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     },
     respStatus: 200,
     respPayload: {
-      id: "chatcmpl-7lNh1TtrsJVgz3Nj70bKkZZk7xPi7",
+      id: "chatcmpl-7",
       model: "gpt-3.5-turbo-0613",
       usage: {
         total_tokens: 21,
@@ -234,7 +234,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     inputTokens: 14,
     outputTokens: 7,
     finishReason: "stop",
-    tags: [{ name: "prompt_id", value: "id2" }],
+    tags: [{ name: "prompt_id", value: "translate_text" }],
   },
   {
     reqPayload: {
@@ -281,7 +281,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     },
     respStatus: 200,
     respPayload: {
-      id: "chatcmpl-7lQS3MktOT8BTgNEytl9dkyssCQqL",
+      id: "chatcmpl-7",
       model: "gpt-4-0613",
       usage: {
         total_tokens: 2910,
@@ -311,7 +311,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     outputTokens: 108,
     finishReason: "stop",
     tags: [
-      { name: "prompt_id", value: "chatcmpl-7lQS3MktOT8BTgNEytl9dkyssCQqL" },
+      { name: "prompt_id", value: "chatcmpl-7" },
       { name: "some_other_tag", value: "some_other_value" },
     ],
   },
@@ -339,7 +339,7 @@ const loggedCallsToCreate: Prisma.LoggedCallCreateManyInput[] = [];
 const loggedCallModelResponsesToCreate: Prisma.LoggedCallModelResponseCreateManyInput[] = [];
 const loggedCallsToUpdate: Prisma.LoggedCallUpdateArgs[] = [];
 const loggedCallTagsToCreate: Prisma.LoggedCallTagCreateManyInput[] = [];
-for (let i = 0; i < 1437; i++) {
+for (let i = 0; i < 11437; i++) {
   const loggedCallId = uuidv4();
   const loggedCallModelResponseId = uuidv4();
   const template =
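Beyond swapping the template ids and tags, the seed change widens the loop from 1437 to 11437 rows; the arrays it fills are typed as Prisma `createMany` inputs. A minimal sketch of the bulk-insert step such arrays usually feed into after the loop (hypothetical: the seed file's post-loop code is not part of this diff):

// hypothetical continuation of the seed script: flush the accumulated rows in bulk
// (assumes the arrays declared above; createMany does one round trip per table)
await prisma.loggedCall.createMany({ data: loggedCallsToCreate });
await prisma.loggedCallModelResponse.createMany({ data: loggedCallModelResponsesToCreate });
await prisma.loggedCallTag.createMany({ data: loggedCallTagsToCreate });

// update args cannot be batched through createMany, so run them one by one
for (const args of loggedCallsToUpdate) {
  await prisma.loggedCall.update(args);
}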
@@ -16,7 +16,7 @@ import Link from "next/link";
 import { BsGearFill, BsGithub, BsPersonCircle } from "react-icons/bs";
 import { IoStatsChartOutline } from "react-icons/io5";
 import { RiHome3Line, RiFlaskLine } from "react-icons/ri";
-import { FaRobot } from "react-icons/fa";
+import { AiOutlineThunderbolt } from "react-icons/ai";
 import { signIn, useSession } from "next-auth/react";
 import ProjectMenu from "./ProjectMenu";
 import NavSidebarOption from "./NavSidebarOption";
@@ -75,7 +75,7 @@ const NavSidebar = () => {

       <IconLink icon={RiHome3Line} label="Dashboard" href="/dashboard" beta />
       <IconLink icon={IoStatsChartOutline} label="Request Logs" href="/request-logs" beta />
-      <IconLink icon={FaRobot} label="Fine Tunes" href="/fine-tunes" beta />
+      <IconLink icon={AiOutlineThunderbolt} label="Fine Tunes" href="/fine-tunes" beta />
       <IconLink icon={RiFlaskLine} label="Experiments" href="/experiments" />
       <VStack w="full" alignItems="flex-start" spacing={0} pt={8}>
         <Text
@@ -16,7 +16,7 @@ import {
   type UseDisclosureReturn,
   Input,
 } from "@chakra-ui/react";
-import { FaRobot } from "react-icons/fa";
+import { AiTwotoneThunderbolt } from "react-icons/ai";
 import humanId from "human-id";
 import { useRouter } from "next/router";

@@ -39,7 +39,7 @@ const FineTuneButton = () => {
       <ActionButton
         onClick={disclosure.onOpen}
         label="Fine Tune"
-        icon={FaRobot}
+        icon={AiTwotoneThunderbolt}
         isDisabled={selectedLogIds.size === 0}
       />
       <FineTuneModal disclosure={disclosure} />
@@ -90,7 +90,7 @@ const FineTuneModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
       <ModalContent w={1200}>
         <ModalHeader>
           <HStack>
-            <Icon as={FaRobot} />
+            <Icon as={AiTwotoneThunderbolt} />
             <Text>Fine Tune</Text>
           </HStack>
         </ModalHeader>
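The icon change itself is just a question of which react-icons component gets handed around: Chakra's `Icon` renders whatever component its `as` prop receives, and the `icon` props on `IconLink` and `ActionButton` above are forwarded the same way. A small self-contained sketch of that pattern (`FineTuneGlyph` is a hypothetical name, not a component in this codebase):

// sketch: swapping icons means swapping which react-icons component is passed via `as`
import { Icon } from "@chakra-ui/react";
import { AiOutlineThunderbolt, AiTwotoneThunderbolt } from "react-icons/ai";

// hypothetical helper; the repo instead passes icons straight into IconLink/ActionButton
export const FineTuneGlyph = ({ filled = false }: { filled?: boolean }) => (
  <Icon as={filled ? AiTwotoneThunderbolt : AiOutlineThunderbolt} boxSize={5} />
);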