Compare commits
21 Commits
publish-py ... upload-jso

| SHA1 |
|---|
| ffeed77ab4 |
| 5de1d293d6 |
| 54b4b1edab |
| 93a967c6ac |
| ed25dcce39 |
| c9a53c2b94 |
| 91f1b2a8ec |
| ba9221c093 |
| 071ce47411 |
| 4fddc80dc5 |
| 5aadf3c2ba |
| 7c4ab151a4 |
| c5c8dbf65e |
| 82b94657b1 |
| 0a642fac2a |
| 4d90ff68c8 |
| 6ac554f7e1 |
| 422a6ff4c6 |
| 6153ebda41 |
| b682bd6b78 |
| 43a22865fd |
@@ -40,3 +40,8 @@ SMTP_HOST="placeholder"
SMTP_PORT="placeholder"
SMTP_LOGIN="placeholder"
SMTP_PASSWORD="placeholder"

# Azure credentials are necessary for uploading large training data files
AZURE_STORAGE_ACCOUNT_NAME="placeholder"
AZURE_STORAGE_ACCOUNT_KEY="placeholder"
AZURE_STORAGE_CONTAINER_NAME="placeholder"
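The new Azure variables pair with the `@azure/identity` and `@azure/storage-blob` dependencies added in `app/package.json` below. As a rough sketch of how they are presumably consumed when uploading large training-data files, here is a minimal upload helper; the helper name, blob-naming scheme, and endpoint URL convention are assumptions, not code from this PR.

```typescript
// Illustrative sketch only — the actual upload helper in this PR may differ.
import { BlobServiceClient, StorageSharedKeyCredential } from "@azure/storage-blob";

const accountName = process.env.AZURE_STORAGE_ACCOUNT_NAME as string;
const accountKey = process.env.AZURE_STORAGE_ACCOUNT_KEY as string;
const containerName = process.env.AZURE_STORAGE_CONTAINER_NAME as string;

// Authenticate with a shared key credential built from the env vars above.
const credential = new StorageSharedKeyCredential(accountName, accountKey);
const serviceClient = new BlobServiceClient(
  `https://${accountName}.blob.core.windows.net`,
  credential,
);

// Upload a training-data file and return the blob name that would be recorded on DatasetFileUpload.
export async function uploadTrainingFile(fileName: string, contents: Buffer): Promise<string> {
  const containerClient = serviceClient.getContainerClient(containerName);
  const blobName = `${Date.now()}-${fileName}`; // hypothetical naming scheme
  await containerClient.getBlockBlobClient(blobName).uploadData(contents);
  return blobName;
}
```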
2 app/@types/nextjs-routes.d.ts (vendored)

@@ -19,6 +19,8 @@ declare module "nextjs-routes" {
    | DynamicRoute<"/api/v1/[...trpc]", { "trpc": string[] }>
    | StaticRoute<"/api/v1/openapi">
    | StaticRoute<"/dashboard">
    | DynamicRoute<"/datasets/[id]", { "id": string }>
    | StaticRoute<"/datasets">
    | DynamicRoute<"/experiments/[experimentSlug]", { "experimentSlug": string }>
    | StaticRoute<"/experiments">
    | StaticRoute<"/fine-tunes">
@@ -26,6 +26,8 @@
  "dependencies": {
    "@anthropic-ai/sdk": "^0.5.8",
    "@apidevtools/json-schema-ref-parser": "^10.1.0",
    "@azure/identity": "^3.3.0",
    "@azure/storage-blob": "12.15.0",
    "@babel/standalone": "^7.22.9",
    "@chakra-ui/anatomy": "^2.2.0",
    "@chakra-ui/next-js": "^2.1.4",
@@ -69,6 +71,7 @@
    "jsonschema": "^1.4.1",
    "kysely": "^0.26.1",
    "kysely-codegen": "^0.10.1",
    "llama-tokenizer-js": "^1.1.3",
    "lodash-es": "^4.17.21",
    "lucide-react": "^0.265.0",
    "marked": "^7.0.3",
@@ -0,0 +1,26 @@
/*
  Warnings:

  - Added the required column `inputTokens` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.
  - Added the required column `outputTokens` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.
  - Added the required column `type` to the `DatasetEntry` table without a default value. This is not possible if the table is not empty.

*/
-- CreateEnum
CREATE TYPE "DatasetEntryType" AS ENUM ('TRAIN', 'TEST');

-- AlterTable
ALTER TABLE "Dataset" ADD COLUMN "trainingRatio" DOUBLE PRECISION NOT NULL DEFAULT 0.8;

-- AlterTable
ALTER TABLE "DatasetEntry" ADD COLUMN "input" JSONB NOT NULL DEFAULT '[]',
ADD COLUMN "inputTokens" INTEGER NOT NULL DEFAULT 0,
ADD COLUMN "output" JSONB,
ADD COLUMN "outputTokens" INTEGER NOT NULL DEFAULT 0,
ADD COLUMN "type" "DatasetEntryType" NOT NULL DEFAULT 'TRAIN';

-- CreateIndex
CREATE INDEX "DatasetEntry_datasetId_createdAt_id_idx" ON "DatasetEntry"("datasetId", "createdAt", "id");

-- CreateIndex
CREATE INDEX "DatasetEntry_datasetId_type_idx" ON "DatasetEntry"("datasetId", "type");
@@ -0,0 +1,5 @@
-- AlterTable
ALTER TABLE "DatasetEntry" ALTER COLUMN "loggedCallId" DROP NOT NULL,
ALTER COLUMN "inputTokens" DROP DEFAULT,
ALTER COLUMN "outputTokens" DROP DEFAULT,
ALTER COLUMN "type" DROP DEFAULT;
@@ -0,0 +1,23 @@
-- CreateEnum
CREATE TYPE "DatasetFileUploadStatus" AS ENUM ('PENDING', 'DOWNLOADING', 'PROCESSING', 'SAVING', 'COMPLETE', 'ERROR');

-- CreateTable
CREATE TABLE "DatasetFileUpload" (
    "id" UUID NOT NULL,
    "datasetId" UUID NOT NULL,
    "blobName" TEXT NOT NULL,
    "fileName" TEXT NOT NULL,
    "fileSize" INTEGER NOT NULL,
    "progress" INTEGER NOT NULL DEFAULT 0,
    "status" "DatasetFileUploadStatus" NOT NULL DEFAULT 'PENDING',
    "uploadedAt" TIMESTAMP(3) NOT NULL,
    "visible" BOOLEAN NOT NULL DEFAULT true,
    "errorMessage" TEXT,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "DatasetFileUpload_pkey" PRIMARY KEY ("id")
);

-- AddForeignKey
ALTER TABLE "DatasetFileUpload" ADD CONSTRAINT "DatasetFileUpload_datasetId_fkey" FOREIGN KEY ("datasetId") REFERENCES "Dataset"("id") ON DELETE CASCADE ON UPDATE CASCADE;
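The `DatasetFileUpload` table tracks an upload through the `DatasetFileUploadStatus` lifecycle (`PENDING` → `DOWNLOADING` → `PROCESSING` → `SAVING` → `COMPLETE`, or `ERROR`), with `progress` stored as a percentage. Below is a minimal Prisma Client sketch of how a background worker might drive those columns; the function name and the specific status/progress values are illustrative, not taken from this PR.

```typescript
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

// Hypothetical worker step: record a new upload, then advance it through the status enum.
export async function trackUpload(
  datasetId: string,
  blobName: string,
  fileName: string,
  fileSize: number,
) {
  const upload = await prisma.datasetFileUpload.create({
    data: { datasetId, blobName, fileName, fileSize, uploadedAt: new Date() },
  });

  // As the file is downloaded and parsed, the worker updates status and progress in place.
  await prisma.datasetFileUpload.update({
    where: { id: upload.id },
    data: { status: "PROCESSING", progress: 50 },
  });

  return upload.id;
}
```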
@@ -176,12 +176,42 @@ model OutputEvaluation {
  @@unique([modelResponseId, evaluationId])
}


enum DatasetFileUploadStatus {
  PENDING
  DOWNLOADING
  PROCESSING
  SAVING
  COMPLETE
  ERROR
}

model DatasetFileUpload {
  id String @id @default(uuid()) @db.Uuid

  datasetId String @db.Uuid
  dataset Dataset @relation(fields: [datasetId], references: [id], onDelete: Cascade)
  blobName String
  fileName String
  fileSize Int
  progress Int @default(0) // Percentage
  status DatasetFileUploadStatus @default(PENDING)
  uploadedAt DateTime
  visible Boolean @default(true)
  errorMessage String?

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Dataset {
  id String @id @default(uuid()) @db.Uuid

- name String
- datasetEntries DatasetEntry[]
- fineTunes FineTune[]
+ name String
+ datasetEntries DatasetEntry[]
+ fineTunes FineTune[]
+ datasetFileUploads DatasetFileUpload[]
+ trainingRatio Float @default(0.8)

  projectId String @db.Uuid
  project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
@@ -190,17 +220,32 @@ model Dataset {
  updatedAt DateTime @updatedAt
}

enum DatasetEntryType {
  TRAIN
  TEST
}

model DatasetEntry {
  id String @id @default(uuid()) @db.Uuid

- loggedCallId String @db.Uuid
- loggedCall LoggedCall @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)
+ loggedCallId String? @db.Uuid
+ loggedCall LoggedCall? @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)

  input Json @default("[]")
  output Json?
  inputTokens Int
  outputTokens Int

  type DatasetEntryType

  datasetId String @db.Uuid
  dataset Dataset? @relation(fields: [datasetId], references: [id], onDelete: Cascade)

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt

  @@index([datasetId, createdAt, id])
  @@index([datasetId, type])
}

model Project {
@@ -452,7 +497,7 @@ model FineTune {
  deploymentFinishedAt DateTime?

  datasetId String @db.Uuid
- dataset Dataset @relation(fields: [datasetId], references: [id], onDelete: Cascade)
+ dataset Dataset @relation(fields: [datasetId], references: [id], onDelete: Cascade)

  projectId String @db.Uuid
  project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
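Reading the schema changes together: `Dataset.trainingRatio` defaults to 0.8, and each `DatasetEntry` now carries a `DatasetEntryType` (`TRAIN` or `TEST`), token counts, and its own `input`/`output` JSON, with `loggedCallId` made optional — presumably so entries created from file uploads need not reference a logged call. A small Prisma Client sketch of how entries might be created under that split; the helper names and the random-assignment strategy are assumptions, not this PR's implementation.

```typescript
import { PrismaClient, Prisma, type DatasetEntryType } from "@prisma/client";

const prisma = new PrismaClient();

// Hypothetical helper: pick TRAIN or TEST according to the dataset's trainingRatio.
async function pickEntryType(datasetId: string): Promise<DatasetEntryType> {
  const dataset = await prisma.dataset.findUniqueOrThrow({ where: { id: datasetId } });
  return Math.random() < dataset.trainingRatio ? "TRAIN" : "TEST";
}

// Hypothetical helper: create an entry with the new required columns.
export async function addDatasetEntry(
  datasetId: string,
  input: Prisma.InputJsonValue,
  inputTokens: number,
  outputTokens: number,
) {
  return prisma.datasetEntry.create({
    data: {
      datasetId,
      input,
      inputTokens,
      outputTokens,
      type: await pickEntryType(datasetId),
    },
  });
}
```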
@@ -1,5 +1,4 @@
import { prisma } from "~/server/db";
import { generateNewCell } from "~/server/utils/generateNewCell";
import dedent from "dedent";
import { execSync } from "child_process";
import fs from "fs";
|
||||
@@ -108,7 +108,7 @@ const MODEL_RESPONSE_TEMPLATES: {
|
||||
inputTokens: 236,
|
||||
outputTokens: 5,
|
||||
finishReason: "stop",
|
||||
tags: [{ name: "prompt_id", value: "define_func" }],
|
||||
tags: [{ name: "prompt_id", value: "add_scenario" }],
|
||||
},
|
||||
{
|
||||
reqPayload: {
|
||||
@@ -311,7 +311,7 @@ const MODEL_RESPONSE_TEMPLATES: {
|
||||
outputTokens: 108,
|
||||
finishReason: "stop",
|
||||
tags: [
|
||||
{ name: "prompt_id", value: "chatcmpl-7" },
|
||||
{ name: "prompt_id", value: "define_func" },
|
||||
{ name: "some_other_tag", value: "some_other_value" },
|
||||
],
|
||||
},
|
||||
|
||||
@@ -3,16 +3,18 @@ import { useState } from "react";
import { Button, HStack, type ButtonProps, Icon, Text } from "@chakra-ui/react";
import { type IconType } from "react-icons";
import { useAppStore } from "~/state/store";
- import { BetaModal } from "../BetaModal";
+ import { BetaModal } from "./BetaModal";

const ActionButton = ({
  icon,
  iconBoxSize = 3.5,
  label,
  requireBeta = false,
  onClick,
  ...buttonProps
}: {
  icon: IconType;
  iconBoxSize?: number;
  label: string;
  requireBeta?: boolean;
  onClick?: () => void;
@@ -39,7 +41,9 @@ const ActionButton = ({
      {...buttonProps}
    >
      <HStack spacing={1}>
-       {icon && <Icon as={icon} color={requireBeta ? "orange.400" : undefined} />}
+       {icon && (
+         <Icon as={icon} boxSize={iconBoxSize} color={requireBeta ? "orange.400" : undefined} />
+       )}
        <Text display={{ base: "none", md: "flex" }}>{label}</Text>
      </HStack>
    </Button>
@@ -16,12 +16,16 @@ import {

import { FiChevronDown } from "react-icons/fi";
import { BiCheck } from "react-icons/bi";
import { isEqual } from "lodash-es";
import React from "react";

type InputDropdownProps<T> = {
  options: ReadonlyArray<T>;
  selectedOption: T;
  onSelect: (option: T) => void;
  inputGroupProps?: InputGroupProps;
  getDisplayLabel?: (option: T) => string;
  isDisabled?: boolean;
};

const InputDropdown = <T,>({
@@ -29,19 +33,21 @@ const InputDropdown = <T,>({
  selectedOption,
  onSelect,
  inputGroupProps,
  getDisplayLabel = (option) => option as string,
  isDisabled,
}: InputDropdownProps<T>) => {
- const popover = useDisclosure();
+ const { onOpen, ...popover } = useDisclosure();

  return (
-   <Popover placement="bottom-start" {...popover}>
+   <Popover placement="bottom-start" onOpen={isDisabled ? undefined : onOpen} {...popover}>
      <PopoverTrigger>
        <InputGroup
          cursor="pointer"
-         w={(selectedOption as string).length * 14 + 180}
+         w={getDisplayLabel(selectedOption).length * 14 + 180}
          {...inputGroupProps}
        >
          <Input
-           value={selectedOption as string}
+           value={getDisplayLabel(selectedOption)}
            // eslint-disable-next-line @typescript-eslint/no-empty-function -- controlled input requires onChange
            onChange={() => {}}
            cursor="pointer"
@@ -52,9 +58,10 @@ const InputDropdown = <T,>({
            onFocus={(e) => {
              e.target.blur();
            }}
            isDisabled={isDisabled}
          />
          <InputRightElement>
-           <Icon as={FiChevronDown} />
+           <Icon as={FiChevronDown} color={isDisabled ? "gray.300" : undefined} />
          </InputRightElement>
        </InputGroup>
      </PopoverTrigger>
@@ -78,8 +85,10 @@ const InputDropdown = <T,>({
            fontSize="sm"
            borderBottomWidth={1}
          >
-           <Text mr={16}>{option as string}</Text>
-           {option === selectedOption && <Icon as={BiCheck} color="blue.500" boxSize={5} />}
+           <Text mr={16}>{getDisplayLabel(option)}</Text>
+           {isEqual(option, selectedOption) && (
+             <Icon as={BiCheck} color="blue.500" boxSize={5} />
+           )}
          </HStack>
        ))}
      </VStack>
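With these changes `InputDropdown` no longer assumes its options are strings: labels go through the optional `getDisplayLabel`, selection is compared with lodash's `isEqual` (so object options work), and `isDisabled` suppresses opening and greys out the chevron. A hedged usage sketch — the options list and wrapper component here are invented for illustration:

```tsx
import InputDropdown from "~/components/InputDropdown";

// Illustrative object-valued options; real callers may use plain strings instead.
const MODEL_OPTIONS = [
  { slug: "llama2-7b", label: "LLaMA 2 7B" },
  { slug: "gpt-3.5-turbo", label: "GPT-3.5 Turbo" },
] as const;

type ModelOption = (typeof MODEL_OPTIONS)[number];

export function ModelPicker({
  selected,
  onSelect,
  disabled,
}: {
  selected: ModelOption;
  onSelect: (option: ModelOption) => void;
  disabled?: boolean;
}) {
  return (
    <InputDropdown
      options={MODEL_OPTIONS}
      selectedOption={selected}
      // Without getDisplayLabel the component would fall back to rendering `option as string`.
      getDisplayLabel={(option) => option.label}
      onSelect={onSelect}
      isDisabled={disabled}
    />
  );
}
```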
@@ -19,15 +19,13 @@ import {
  useScenarios,
} from "~/utils/hooks";
import { BsGear, BsPencil, BsPlus, BsStars } from "react-icons/bs";
import { useAppStore } from "~/state/store";
import { api } from "~/utils/api";

export const ActionButton = (props: ButtonProps) => (
  <Button size="sm" variant="ghost" color="gray.600" {...props} />
);

- export const ScenariosHeader = () => {
-   const openDrawer = useAppStore((s) => s.openDrawer);
+ export const ScenariosHeader = ({ openDrawer }: { openDrawer: () => void }) => {
    const { canModify } = useExperimentAccess();
    const scenarios = useScenarios();
@@ -20,7 +20,7 @@ export default function VariantStats(props: { variant: PromptVariant }) {
    inputTokens: 0,
    outputTokens: 0,
    scenarioCount: 0,
-   outputCount: 0,
+   finishedCount: 0,
    awaitingCompletions: false,
    awaitingEvals: false,
  },
@@ -42,7 +42,7 @@ export default function VariantStats(props: { variant: PromptVariant }) {

  const scale = chroma.scale([failColor, neutralColor, passColor]).domain([0, 0.5, 1]);

- const showNumFinished = data.scenarioCount > 0 && data.scenarioCount !== data.outputCount;
+ const showNumFinished = data.scenarioCount > 0 && data.scenarioCount !== data.finishedCount;

  return (
    <HStack
@@ -55,7 +55,7 @@ export default function VariantStats(props: { variant: PromptVariant }) {
      <HStack px={cellPadding.x} flexWrap="wrap">
        {showNumFinished && (
          <Text>
-           {data.outputCount} / {data.scenarioCount}
+           {data.finishedCount} / {data.scenarioCount}
          </Text>
        )}
        {data.evalResults.map((result) => {
@@ -12,7 +12,13 @@ import ScenarioPaginator from "./ScenarioPaginator";
import { Fragment } from "react";
import useScrolledPast from "./useHasScrolledPast";

- export default function OutputsTable({ experimentId }: { experimentId: string | undefined }) {
+ export default function OutputsTable({
+   experimentId,
+   openDrawer,
+ }: {
+   experimentId: string | undefined;
+   openDrawer: () => void;
+ }) {
    const variants = api.promptVariants.list.useQuery(
      { experimentId: experimentId as string },
      { enabled: !!experimentId },
@@ -91,7 +97,7 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
        colStart={1}
        borderRightWidth={0}
      >
-       <ScenariosHeader />
+       <ScenariosHeader openDrawer={openDrawer} />
      </GridItem>

      {scenarios.data.scenarios.map((scenario, i) => (
@@ -0,0 +1,37 @@
import {
  Drawer,
  DrawerBody,
  DrawerCloseButton,
  DrawerContent,
  DrawerHeader,
  DrawerOverlay,
  Heading,
  VStack,
  type UseDisclosureReturn,
} from "@chakra-ui/react";

import { DeleteButton } from "./DeleteButton";

export default function DatasetConfigurationDrawer({
  disclosure,
}: {
  disclosure: UseDisclosureReturn;
}) {
  return (
    <Drawer placement="right" size="md" {...disclosure}>
      <DrawerOverlay />
      <DrawerContent>
        <DrawerCloseButton />
        <DrawerHeader>
          <Heading size="md">Dataset Configuration</Heading>
        </DrawerHeader>
        <DrawerBody h="full" pb={4}>
          <VStack h="full" justifyContent="space-between">
            <VStack spacing={6}></VStack>
            <DeleteButton closeDrawer={disclosure.onClose} />
          </VStack>
        </DrawerBody>
      </DrawerContent>
    </Drawer>
  );
}
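A sketch of how a dataset page might wire this drawer together with the `DatasetHeaderButtons` component added later in this diff, using Chakra's `useDisclosure`; the page component and the import paths are assumptions, since the paths are not shown for every file.

```tsx
import { useDisclosure } from "@chakra-ui/react";

// Paths assumed for illustration.
import DatasetConfigurationDrawer from "~/components/datasets/DatasetConfigurationDrawer/DatasetConfigurationDrawer";
import { DatasetHeaderButtons } from "~/components/datasets/DatasetHeaderButtons";

export default function DatasetPageHeader() {
  // One disclosure drives both the "Configure" button and the drawer it opens.
  const drawerDisclosure = useDisclosure();

  return (
    <>
      <DatasetHeaderButtons openDrawer={drawerDisclosure.onOpen} />
      <DatasetConfigurationDrawer disclosure={drawerDisclosure} />
    </>
  );
}
```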
@@ -0,0 +1,39 @@
import { Button, Icon, useDisclosure, Text } from "@chakra-ui/react";
import { useRouter } from "next/router";
import { BsTrash } from "react-icons/bs";

import { useHandledAsyncCallback, useDataset } from "~/utils/hooks";
import DeleteDatasetDialog from "./DeleteDatasetDialog";

export const DeleteButton = ({ closeDrawer }: { closeDrawer: () => void }) => {
  const dataset = useDataset();
  const router = useRouter();

  const disclosure = useDisclosure();

  const [onDelete] = useHandledAsyncCallback(async () => {
    await router.push({ pathname: "/datasets" });
    closeDrawer();
  }, [router, closeDrawer]);

  return (
    <>
      <Button
        size="sm"
        variant="ghost"
        colorScheme="red"
        fontWeight="normal"
        onClick={disclosure.onOpen}
      >
        <Icon as={BsTrash} boxSize={4} />
        <Text ml={2}>Delete Dataset</Text>
      </Button>

      <DeleteDatasetDialog
        datasetId={dataset.data?.id}
        onDelete={onDelete}
        disclosure={disclosure}
      />
    </>
  );
};
@@ -0,0 +1,73 @@
import { useRef } from "react";
import {
  type UseDisclosureReturn,
  AlertDialog,
  AlertDialogOverlay,
  AlertDialogContent,
  AlertDialogHeader,
  AlertDialogBody,
  AlertDialogFooter,
  Button,
} from "@chakra-ui/react";
import { api } from "~/utils/api";

import { useHandledAsyncCallback } from "~/utils/hooks";

const DeleteDatasetDialog = ({
  datasetId,
  onDelete,
  disclosure,
}: {
  datasetId?: string;
  onDelete?: () => void;
  disclosure: UseDisclosureReturn;
}) => {
  const cancelRef = useRef<HTMLButtonElement>(null);

  const mutation = api.datasets.delete.useMutation();
  const utils = api.useContext();

  const [onDeleteConfirm, deletionInProgress] = useHandledAsyncCallback(async () => {
    if (!datasetId) return;
    await mutation.mutateAsync({ id: datasetId });
    await utils.datasets.list.invalidate();
    onDelete?.();

    disclosure.onClose();
  }, [mutation, datasetId, disclosure.onClose]);

  console.log("dataset id", datasetId);

  return (
    <AlertDialog leastDestructiveRef={cancelRef} {...disclosure}>
      <AlertDialogOverlay>
        <AlertDialogContent>
          <AlertDialogHeader fontSize="lg" fontWeight="bold">
            Delete Dataset
          </AlertDialogHeader>

          <AlertDialogBody>
            If you delete this dataset all the associated dataset entries will be deleted as well.
            Are you sure?
          </AlertDialogBody>

          <AlertDialogFooter>
            <Button ref={cancelRef} onClick={disclosure.onClose}>
              Cancel
            </Button>
            <Button
              colorScheme="red"
              isLoading={deletionInProgress}
              onClick={onDeleteConfirm}
              ml={3}
            >
              Delete
            </Button>
          </AlertDialogFooter>
        </AlertDialogContent>
      </AlertDialogOverlay>
    </AlertDialog>
  );
};

export default DeleteDatasetDialog;
@@ -0,0 +1,46 @@
import { Card, Table, Tbody } from "@chakra-ui/react";
import { useState } from "react";
import { useDatasetEntries } from "~/utils/hooks";
import { TableHeader, TableRow, EmptyTableRow } from "./TableRow";
import DatasetEntryEditorDrawer from "./DatasetEntryEditorDrawer";

export default function DatasetEntriesTable() {
  const [expandedDatasetEntryId, setExpandedDatasetEntryId] = useState<string | null>(null);
  const datasetEntries = useDatasetEntries().data?.entries;

  return (
    <>
      <Card width="100%" overflowX="auto">
        <Table>
          <TableHeader />
          <Tbody>
            {datasetEntries?.length ? (
              datasetEntries?.map((entry) => {
                return (
                  <TableRow
                    key={entry.id}
                    datasetEntry={entry}
                    onToggle={() => {
                      if (entry.id === expandedDatasetEntryId) {
                        setExpandedDatasetEntryId(null);
                      } else {
                        setExpandedDatasetEntryId(entry.id);
                      }
                    }}
                    showOptions
                  />
                );
              })
            ) : (
              <EmptyTableRow />
            )}
          </Tbody>
        </Table>
      </Card>
      <DatasetEntryEditorDrawer
        datasetEntryId={expandedDatasetEntryId}
        clearDatasetEntryId={() => setExpandedDatasetEntryId(null)}
      />
    </>
  );
}
@@ -0,0 +1,174 @@
|
||||
import { useState, useEffect, useMemo } from "react";
|
||||
import {
|
||||
Drawer,
|
||||
DrawerBody,
|
||||
DrawerCloseButton,
|
||||
DrawerContent,
|
||||
DrawerHeader,
|
||||
DrawerOverlay,
|
||||
DrawerFooter,
|
||||
Heading,
|
||||
VStack,
|
||||
HStack,
|
||||
Button,
|
||||
Text,
|
||||
Divider,
|
||||
Icon,
|
||||
} from "@chakra-ui/react";
|
||||
import { type CreateChatCompletionRequestMessage } from "openai/resources/chat";
|
||||
import { BsPlus } from "react-icons/bs";
|
||||
import { type DatasetEntryType } from "@prisma/client";
|
||||
|
||||
import { api } from "~/utils/api";
|
||||
import { useDatasetEntry, useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import EditableMessage from "./EditableMessage";
|
||||
import EntryTypeDropdown from "./EntryTypeDropdown";
|
||||
|
||||
export default function DatasetEntryEditorDrawer({
|
||||
datasetEntryId,
|
||||
clearDatasetEntryId,
|
||||
}: {
|
||||
datasetEntryId: string | null;
|
||||
clearDatasetEntryId: () => void;
|
||||
}) {
|
||||
const utils = api.useContext();
|
||||
|
||||
const datasetEntry = useDatasetEntry(datasetEntryId).data;
|
||||
|
||||
const savedInputMessages = useMemo(
|
||||
() => datasetEntry?.input as unknown as CreateChatCompletionRequestMessage[],
|
||||
[datasetEntry],
|
||||
);
|
||||
const savedOutputMessage = useMemo(
|
||||
() => datasetEntry?.output as unknown as CreateChatCompletionRequestMessage,
|
||||
[datasetEntry],
|
||||
);
|
||||
|
||||
const [inputMessagesToSave, setInputMessagesToSave] = useState<
|
||||
CreateChatCompletionRequestMessage[]
|
||||
>([]);
|
||||
const [outputMessageToSave, setOutputMessageToSave] =
|
||||
useState<CreateChatCompletionRequestMessage | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (savedInputMessages) {
|
||||
setInputMessagesToSave(savedInputMessages);
|
||||
setOutputMessageToSave(savedOutputMessage);
|
||||
}
|
||||
}, [savedInputMessages, savedOutputMessage]);
|
||||
|
||||
const updateMutation = api.datasetEntries.update.useMutation();
|
||||
const [onSave, savingInProgress] = useHandledAsyncCallback(async () => {
|
||||
if (!datasetEntryId || !inputMessagesToSave) return;
|
||||
await updateMutation.mutateAsync({
|
||||
id: datasetEntryId,
|
||||
updates: {
|
||||
input: JSON.stringify(inputMessagesToSave),
|
||||
output: JSON.stringify(outputMessageToSave),
|
||||
},
|
||||
});
|
||||
await utils.datasetEntries.list.invalidate();
|
||||
await utils.datasetEntries.get.invalidate({ id: datasetEntryId });
|
||||
}, [updateMutation, datasetEntryId, inputMessagesToSave, outputMessageToSave, utils]);
|
||||
|
||||
const [onUpdateType] = useHandledAsyncCallback(
|
||||
async (type: DatasetEntryType) => {
|
||||
if (!datasetEntryId) return;
|
||||
await updateMutation.mutateAsync({
|
||||
id: datasetEntryId,
|
||||
updates: {
|
||||
type,
|
||||
},
|
||||
});
|
||||
await utils.datasetEntries.list.invalidate();
|
||||
await utils.datasetEntries.get.invalidate({ id: datasetEntryId });
|
||||
},
|
||||
[updateMutation, datasetEntryId, utils],
|
||||
);
|
||||
|
||||
return (
|
||||
<Drawer isOpen={!!datasetEntryId} onClose={clearDatasetEntryId} placement="right" size="md">
|
||||
<DrawerOverlay />
|
||||
<DrawerContent>
|
||||
<DrawerCloseButton pt={6} />
|
||||
<DrawerHeader bgColor="orange.50">
|
||||
<HStack w="full" justifyContent="space-between" pr={8}>
|
||||
<Heading size="md">Dataset Entry</Heading>
|
||||
{datasetEntry && (
|
||||
<EntryTypeDropdown type={datasetEntry.type} onTypeChange={onUpdateType} />
|
||||
)}
|
||||
</HStack>
|
||||
</DrawerHeader>
|
||||
<DrawerBody h="full" pb={4} bgColor="orange.50">
|
||||
<VStack h="full" justifyContent="space-between">
|
||||
<VStack w="full" spacing={12} py={4}>
|
||||
<VStack w="full" alignItems="flex-start">
|
||||
<Text fontWeight="bold">Input</Text>
|
||||
{inputMessagesToSave.map((message, i) => {
|
||||
return (
|
||||
<>
|
||||
<Divider key={`divider-${i}`} my={4} />
|
||||
<EditableMessage
|
||||
key={i}
|
||||
message={message}
|
||||
onEdit={(message) => {
|
||||
const newInputMessages = [...inputMessagesToSave];
|
||||
newInputMessages[i] = message;
|
||||
setInputMessagesToSave(newInputMessages);
|
||||
}}
|
||||
onDelete={() => {
|
||||
const newInputMessages = [...inputMessagesToSave];
|
||||
newInputMessages.splice(i, 1);
|
||||
setInputMessagesToSave(newInputMessages);
|
||||
}}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
})}
|
||||
<Divider my={4} />
|
||||
<Button
|
||||
w="full"
|
||||
onClick={() =>
|
||||
setInputMessagesToSave([...inputMessagesToSave, { role: "user", content: "" }])
|
||||
}
|
||||
variant="outline"
|
||||
color="gray.500"
|
||||
_hover={{ bgColor: "orange.100" }}
|
||||
>
|
||||
<HStack spacing={0}>
|
||||
<Text>Add Message</Text>
|
||||
<Icon as={BsPlus} boxSize={6} />
|
||||
</HStack>
|
||||
</Button>
|
||||
</VStack>
|
||||
<VStack w="full" alignItems="flex-start">
|
||||
<Text fontWeight="bold">Output</Text>
|
||||
<Divider my={4} />
|
||||
<EditableMessage
|
||||
message={outputMessageToSave}
|
||||
onEdit={(message) => setOutputMessageToSave(message)}
|
||||
isOutput
|
||||
/>
|
||||
</VStack>
|
||||
</VStack>
|
||||
</VStack>
|
||||
</DrawerBody>
|
||||
<DrawerFooter bgColor="orange.50">
|
||||
<HStack>
|
||||
<Button
|
||||
onClick={() => {
|
||||
setInputMessagesToSave(savedInputMessages);
|
||||
setOutputMessageToSave(savedOutputMessage);
|
||||
}}
|
||||
>
|
||||
Reset
|
||||
</Button>
|
||||
<Button isLoading={savingInProgress} onClick={onSave} colorScheme="orange">
|
||||
Save
|
||||
</Button>
|
||||
</HStack>
|
||||
</DrawerFooter>
|
||||
</DrawerContent>
|
||||
</Drawer>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,105 @@
|
||||
import { VStack, HStack, Tooltip, IconButton, Icon } from "@chakra-ui/react";
|
||||
import { type CreateChatCompletionRequestMessage } from "openai/resources/chat";
|
||||
import { BsX } from "react-icons/bs";
|
||||
|
||||
import AutoResizeTextArea from "~/components/AutoResizeTextArea";
|
||||
import InputDropdown from "~/components/InputDropdown";
|
||||
import { parseableToFunctionCall } from "~/utils/utils";
|
||||
import FunctionCallEditor from "./FunctionCallEditor";
|
||||
|
||||
const MESSAGE_ROLE_OPTIONS = ["system", "user", "assistant", "function"] as const;
|
||||
const OUTPUT_OPTIONS = ["plaintext", "func_call"] as const;
|
||||
|
||||
const EditableMessage = ({
|
||||
message,
|
||||
onEdit,
|
||||
onDelete,
|
||||
isOutput,
|
||||
}: {
|
||||
message: CreateChatCompletionRequestMessage | null;
|
||||
onEdit: (message: CreateChatCompletionRequestMessage) => void;
|
||||
onDelete?: () => void;
|
||||
isOutput?: boolean;
|
||||
}) => {
|
||||
const { role = "assistant", content = "", function_call } = message || {};
|
||||
|
||||
const currentOutputOption: (typeof OUTPUT_OPTIONS)[number] = function_call
|
||||
? "func_call"
|
||||
: "plaintext";
|
||||
|
||||
return (
|
||||
<VStack w="full">
|
||||
<HStack w="full" justifyContent="space-between">
|
||||
<HStack>
|
||||
{!isOutput && (
|
||||
<InputDropdown
|
||||
options={MESSAGE_ROLE_OPTIONS}
|
||||
selectedOption={role}
|
||||
onSelect={(option) => {
|
||||
const updatedMessage = { role: option, content };
|
||||
if (role === "assistant" && currentOutputOption === "func_call") {
|
||||
updatedMessage.content = JSON.stringify(function_call, null, 2);
|
||||
}
|
||||
onEdit(updatedMessage);
|
||||
}}
|
||||
inputGroupProps={{ w: "32", bgColor: "white" }}
|
||||
/>
|
||||
)}
|
||||
{role === "assistant" && (
|
||||
<InputDropdown
|
||||
options={OUTPUT_OPTIONS}
|
||||
selectedOption={currentOutputOption}
|
||||
onSelect={(option) => {
|
||||
const updatedMessage: CreateChatCompletionRequestMessage = {
|
||||
role,
|
||||
content: null,
|
||||
function_call: undefined,
|
||||
};
|
||||
if (option === "plaintext") {
|
||||
updatedMessage.content = JSON.stringify(function_call, null, 2);
|
||||
} else if (option === "func_call") {
|
||||
updatedMessage.function_call =
|
||||
content && parseableToFunctionCall(content)
|
||||
? JSON.parse(content)
|
||||
: { name: "", arguments: "{}" };
|
||||
}
|
||||
onEdit(updatedMessage);
|
||||
}}
|
||||
inputGroupProps={{ w: "32", bgColor: "white" }}
|
||||
/>
|
||||
)}
|
||||
</HStack>
|
||||
{!isOutput && (
|
||||
<HStack>
|
||||
<Tooltip label="Delete" hasArrow>
|
||||
<IconButton
|
||||
aria-label="Delete"
|
||||
icon={<Icon as={BsX} boxSize={6} />}
|
||||
onClick={onDelete}
|
||||
size="xs"
|
||||
display="flex"
|
||||
colorScheme="gray"
|
||||
color="gray.500"
|
||||
variant="ghost"
|
||||
/>
|
||||
</Tooltip>
|
||||
</HStack>
|
||||
)}
|
||||
</HStack>
|
||||
{function_call ? (
|
||||
<FunctionCallEditor
|
||||
function_call={function_call}
|
||||
onEdit={(function_call) => onEdit({ role, function_call, content: null })}
|
||||
/>
|
||||
) : (
|
||||
<AutoResizeTextArea
|
||||
value={content || JSON.stringify(function_call, null, 2)}
|
||||
onChange={(e) => onEdit({ role, content: e.target.value })}
|
||||
bgColor="white"
|
||||
/>
|
||||
)}
|
||||
</VStack>
|
||||
);
|
||||
};
|
||||
|
||||
export default EditableMessage;
|
||||
@@ -0,0 +1,24 @@
import { type DatasetEntryType } from "@prisma/client";

import InputDropdown from "~/components/InputDropdown";

const ENTRY_TYPE_OPTIONS: DatasetEntryType[] = ["TRAIN", "TEST"];

const EntryTypeDropdown = ({
  type,
  onTypeChange,
}: {
  type: DatasetEntryType;
  onTypeChange: (type: DatasetEntryType) => void;
}) => {
  return (
    <InputDropdown
      options={ENTRY_TYPE_OPTIONS}
      selectedOption={type}
      onSelect={onTypeChange}
      inputGroupProps={{ w: "32", bgColor: "white" }}
    />
  );
};

export default EntryTypeDropdown;
@@ -0,0 +1,125 @@
|
||||
import { useRef, useMemo, useEffect } from "react";
|
||||
import { VStack, HStack, Text, Input, Box } from "@chakra-ui/react";
|
||||
import { type CreateChatCompletionRequestMessage } from "openai/resources/chat";
|
||||
|
||||
import { useAppStore } from "~/state/store";
|
||||
import { type CreatedEditor } from "~/state/sharedVariantEditor.slice";
|
||||
|
||||
const FunctionCallEditor = ({
|
||||
function_call,
|
||||
onEdit,
|
||||
}: {
|
||||
function_call: CreateChatCompletionRequestMessage.FunctionCall;
|
||||
onEdit: (function_call: CreateChatCompletionRequestMessage.FunctionCall) => void;
|
||||
}) => {
|
||||
const monaco = useAppStore.use.sharedArgumentsEditor.monaco();
|
||||
const editorRef = useRef<CreatedEditor | null>(null);
|
||||
const editorId = useMemo(() => `editor_${Math.random().toString(36).substring(7)}`, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (monaco) {
|
||||
const container = document.getElementById(editorId) as HTMLElement;
|
||||
|
||||
const editor = monaco.editor.create(container, {
|
||||
value: function_call.arguments,
|
||||
language: "json",
|
||||
theme: "customTheme",
|
||||
lineNumbers: "off",
|
||||
minimap: { enabled: false },
|
||||
wrappingIndent: "indent",
|
||||
wrappingStrategy: "advanced",
|
||||
wordWrap: "on",
|
||||
folding: false,
|
||||
scrollbar: {
|
||||
alwaysConsumeMouseWheel: false,
|
||||
verticalScrollbarSize: 0,
|
||||
},
|
||||
wordWrapBreakAfterCharacters: "",
|
||||
wordWrapBreakBeforeCharacters: "",
|
||||
quickSuggestions: true,
|
||||
renderLineHighlight: "none",
|
||||
fontSize: 14,
|
||||
scrollBeyondLastLine: false,
|
||||
});
|
||||
|
||||
editorRef.current = editor;
|
||||
|
||||
const updateHeight = () => {
|
||||
const contentHeight = editor.getContentHeight();
|
||||
container.style.height = `${contentHeight}px`;
|
||||
editor.layout();
|
||||
};
|
||||
|
||||
const attemptDocumentFormat = () => {
|
||||
const action = editor.getAction("editor.action.formatDocument");
|
||||
if (action) {
|
||||
action
|
||||
.run()
|
||||
.then(updateHeight)
|
||||
.catch((error) => {
|
||||
console.error("Error running formatDocument:", error);
|
||||
});
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
editor.onDidBlurEditorText(() => {
|
||||
attemptDocumentFormat();
|
||||
onEdit({ name: function_call.name, arguments: editor.getValue() });
|
||||
});
|
||||
|
||||
// Interval function to check for action availability
|
||||
const checkForActionInterval = setInterval(() => {
|
||||
const formatted = attemptDocumentFormat();
|
||||
if (formatted) {
|
||||
clearInterval(checkForActionInterval); // Clear the interval once the action is found and run
|
||||
}
|
||||
}, 100); // Check every 100ms
|
||||
|
||||
// Add content change listener
|
||||
const contentChangeListener = editor.onDidChangeModelContent(updateHeight);
|
||||
|
||||
const resizeObserver = new ResizeObserver(() => {
|
||||
editor.layout();
|
||||
});
|
||||
resizeObserver.observe(container);
|
||||
|
||||
return () => {
|
||||
contentChangeListener.dispose();
|
||||
resizeObserver.disconnect();
|
||||
editor?.dispose();
|
||||
};
|
||||
}
|
||||
}, [monaco, editorId, function_call.name, function_call.arguments, onEdit]);
|
||||
|
||||
return (
|
||||
<VStack w="full" alignItems="flex-start">
|
||||
<HStack w="full">
|
||||
<Text fontWeight="bold" w={192}>
|
||||
Name:
|
||||
</Text>
|
||||
<Input
|
||||
value={function_call.name}
|
||||
onChange={(e) => onEdit({ name: e.target.value, arguments: function_call.arguments })}
|
||||
bgColor="white"
|
||||
/>
|
||||
</HStack>
|
||||
<Text fontWeight="bold" w={32}>
|
||||
Arguments
|
||||
</Text>
|
||||
<VStack
|
||||
borderRadius={4}
|
||||
border="1px solid"
|
||||
borderColor="gray.200"
|
||||
w="full"
|
||||
py={1}
|
||||
bgColor="white"
|
||||
>
|
||||
<Box id={editorId} w="full" />
|
||||
</VStack>
|
||||
</VStack>
|
||||
);
|
||||
};
|
||||
|
||||
export default FunctionCallEditor;
|
||||
128 app/src/components/datasets/DatasetEntriesTable/TableRow.tsx (Normal file)
@@ -0,0 +1,128 @@
|
||||
import { Box, Td, Tr, Thead, Th, Tooltip, HStack, Text, Checkbox } from "@chakra-ui/react";
|
||||
import Link from "next/link";
|
||||
|
||||
import dayjs from "~/utils/dayjs";
|
||||
import { type RouterOutputs } from "~/utils/api";
|
||||
import { useAppStore } from "~/state/store";
|
||||
import { useIsClientRehydrated, useDatasetEntries } from "~/utils/hooks";
|
||||
import { useMemo } from "react";
|
||||
|
||||
type DatasetEntry = RouterOutputs["datasetEntries"]["list"]["entries"][0];
|
||||
|
||||
export const TableHeader = () => {
|
||||
const matchingDatasetEntryIds = useDatasetEntries().data?.matchingEntryIds;
|
||||
const selectedDatasetEntryIds = useAppStore((s) => s.selectedDatasetEntries.selectedIds);
|
||||
const addSelectedIds = useAppStore((s) => s.selectedDatasetEntries.addSelectedIds);
|
||||
const clearSelectedIds = useAppStore((s) => s.selectedDatasetEntries.clearSelectedIds);
|
||||
const allSelected = useMemo(() => {
|
||||
if (!matchingDatasetEntryIds || !matchingDatasetEntryIds.length) return false;
|
||||
return matchingDatasetEntryIds.every((id) => selectedDatasetEntryIds.has(id));
|
||||
}, [matchingDatasetEntryIds, selectedDatasetEntryIds]);
|
||||
const isClientRehydrated = useIsClientRehydrated();
|
||||
if (!isClientRehydrated) return null;
|
||||
|
||||
return (
|
||||
<Thead>
|
||||
<Tr>
|
||||
<Th pr={0}>
|
||||
<HStack minW={16}>
|
||||
<Checkbox
|
||||
isChecked={allSelected}
|
||||
onChange={() => {
|
||||
allSelected ? clearSelectedIds() : addSelectedIds(matchingDatasetEntryIds || []);
|
||||
}}
|
||||
/>
|
||||
<Text>
|
||||
({selectedDatasetEntryIds.size ? `${selectedDatasetEntryIds.size}/` : ""}
|
||||
{matchingDatasetEntryIds?.length || 0})
|
||||
</Text>
|
||||
</HStack>
|
||||
</Th>
|
||||
<Th>Created At</Th>
|
||||
<Th isNumeric>Input tokens</Th>
|
||||
<Th isNumeric>Output tokens</Th>
|
||||
<Th isNumeric>Type</Th>
|
||||
</Tr>
|
||||
</Thead>
|
||||
);
|
||||
};
|
||||
|
||||
export const TableRow = ({
|
||||
datasetEntry,
|
||||
onToggle,
|
||||
showOptions,
|
||||
}: {
|
||||
datasetEntry: DatasetEntry;
|
||||
onToggle: () => void;
|
||||
showOptions?: boolean;
|
||||
}) => {
|
||||
const createdAt = dayjs(datasetEntry.createdAt).format("MMMM D h:mm A");
|
||||
const fullTime = dayjs(datasetEntry.createdAt).toString();
|
||||
|
||||
const isChecked = useAppStore((s) => s.selectedDatasetEntries.selectedIds.has(datasetEntry.id));
|
||||
const toggleChecked = useAppStore((s) => s.selectedDatasetEntries.toggleSelectedId);
|
||||
|
||||
const isClientRehydrated = useIsClientRehydrated();
|
||||
if (!isClientRehydrated) return null;
|
||||
|
||||
return (
|
||||
<Tr
|
||||
onClick={onToggle}
|
||||
key={datasetEntry.id}
|
||||
_hover={{ bgColor: "gray.50", cursor: "pointer" }}
|
||||
fontSize="sm"
|
||||
>
|
||||
{showOptions && (
|
||||
<Td>
|
||||
<Checkbox isChecked={isChecked} onChange={() => toggleChecked(datasetEntry.id)} />
|
||||
</Td>
|
||||
)}
|
||||
<Td>
|
||||
<Tooltip label={fullTime} placement="top">
|
||||
<Box whiteSpace="nowrap" minW="120px">
|
||||
{createdAt}
|
||||
</Box>
|
||||
</Tooltip>
|
||||
</Td>
|
||||
<Td isNumeric>{datasetEntry.inputTokens}</Td>
|
||||
<Td isNumeric>{datasetEntry.outputTokens}</Td>
|
||||
<Td isNumeric>{datasetEntry.type}</Td>
|
||||
</Tr>
|
||||
);
|
||||
};
|
||||
|
||||
export const EmptyTableRow = ({ filtersApplied = true }: { filtersApplied?: boolean }) => {
|
||||
const visibleColumns = useAppStore((s) => s.columnVisibility.visibleColumns);
|
||||
const filters = useAppStore((state) => state.logFilters.filters);
|
||||
const { isLoading } = useDatasetEntries();
|
||||
|
||||
if (isLoading) return null;
|
||||
|
||||
if (filters.length && filtersApplied) {
|
||||
return (
|
||||
<Tr>
|
||||
<Td w="full" colSpan={visibleColumns.size + 1}>
|
||||
<Text color="gray.500" textAlign="center" w="full" p={4}>
|
||||
No matching entries found. Try removing some filters.
|
||||
</Text>
|
||||
</Td>
|
||||
</Tr>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Tr>
|
||||
<Td w="full" colSpan={visibleColumns.size + 1}>
|
||||
<Text color="gray.500" textAlign="center" w="full" p={4}>
|
||||
This dataset has no entries. Add some logs in the{" "}
|
||||
<Link href="/request-logs">
|
||||
<Text as="span" color="blue.600">
|
||||
Request Logs
|
||||
</Text>
|
||||
</Link>{" "}
|
||||
tab.
|
||||
</Text>
|
||||
</Td>
|
||||
</Tr>
|
||||
);
|
||||
};
|
||||
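`TableHeader`, `TableRow`, and the delete/download/fine-tune buttons below all read from a `selectedDatasetEntries` slice on the app store (`selectedIds`, `addSelectedIds`, `toggleSelectedId`, `clearSelectedIds`). A standalone zustand sketch of the shape those call sites imply; the real slice lives inside `~/state/store` and may differ in detail.

```typescript
import { create } from "zustand";

// Shape implied by the components in this diff; the actual slice in ~/state/store may differ.
type SelectedDatasetEntriesSlice = {
  selectedIds: Set<string>;
  addSelectedIds: (ids: string[]) => void;
  toggleSelectedId: (id: string) => void;
  clearSelectedIds: () => void;
};

export const useSelectedDatasetEntries = create<SelectedDatasetEntriesSlice>((set) => ({
  selectedIds: new Set<string>(),
  addSelectedIds: (ids) =>
    set((state) => ({ selectedIds: new Set([...state.selectedIds, ...ids]) })),
  toggleSelectedId: (id) =>
    set((state) => {
      const next = new Set(state.selectedIds);
      if (next.has(id)) {
        next.delete(id);
      } else {
        next.add(id);
      }
      return { selectedIds: next };
    }),
  clearSelectedIds: () => set({ selectedIds: new Set<string>() }),
}));
```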
16 app/src/components/datasets/DatasetEntryPaginator.tsx (Normal file)

@@ -0,0 +1,16 @@
import { type StackProps } from "@chakra-ui/react";

import { useDatasetEntries } from "~/utils/hooks";
import Paginator from "../Paginator";

const DatasetEntryPaginator = (props: StackProps) => {
  const { data } = useDatasetEntries();

  if (!data) return null;

  const { matchingEntryIds } = data;

  return <Paginator count={matchingEntryIds.length} {...props} />;
};

export default DatasetEntryPaginator;
20 app/src/components/datasets/DatasetHeaderButtons.tsx (Normal file)

@@ -0,0 +1,20 @@
import { Button, HStack, Icon, Text } from "@chakra-ui/react";
import { useDataset } from "~/utils/hooks";
import { BsGearFill } from "react-icons/bs";

export const DatasetHeaderButtons = ({ openDrawer }: { openDrawer: () => void }) => {
  const dataset = useDataset();

  if (dataset.isLoading) return null;

  return (
    <HStack spacing={0} mt={{ base: 2, md: 0 }}>
      <Button variant={{ base: "solid", md: "ghost" }} onClick={openDrawer}>
        <HStack>
          <Icon as={BsGearFill} />
          <Text>Configure</Text>
        </HStack>
      </Button>
    </HStack>
  );
};
52 app/src/components/datasets/DatasetsTable.tsx (Normal file)
@@ -0,0 +1,52 @@
|
||||
import { Card, Table, Thead, Tr, Th, Tbody, Td, VStack, Icon, Text } from "@chakra-ui/react";
|
||||
import { FaTable } from "react-icons/fa";
|
||||
import Link from "next/link";
|
||||
|
||||
import dayjs from "~/utils/dayjs";
|
||||
import { useDatasets } from "~/utils/hooks";
|
||||
|
||||
const DatasetsTable = ({}) => {
|
||||
const { data } = useDatasets();
|
||||
|
||||
const datasets = data || [];
|
||||
|
||||
return (
|
||||
<Card width="100%" overflowX="auto">
|
||||
{datasets.length ? (
|
||||
<Table>
|
||||
<Thead>
|
||||
<Tr>
|
||||
<Th>Name</Th>
|
||||
<Th>Created At</Th>
|
||||
<Th>Size</Th>
|
||||
</Tr>
|
||||
</Thead>
|
||||
<Tbody>
|
||||
{datasets.map((dataset) => {
|
||||
return (
|
||||
<Tr key={dataset.id}>
|
||||
<Td>
|
||||
<Link href={{ pathname: "/datasets/[id]", query: { id: dataset.id } }}>
|
||||
<Text color="blue.600">{dataset.name}</Text>
|
||||
</Link>
|
||||
</Td>
|
||||
<Td>{dayjs(dataset.createdAt).format("MMMM D h:mm A")}</Td>
|
||||
<Td>{dataset._count.datasetEntries}</Td>
|
||||
</Tr>
|
||||
);
|
||||
})}
|
||||
</Tbody>
|
||||
</Table>
|
||||
) : (
|
||||
<VStack py={8}>
|
||||
<Icon as={FaTable} boxSize={16} color="gray.300" />
|
||||
<Text color="gray.400" fontSize="lg" fontWeight="bold">
|
||||
No Datasets Found. Create your first dataset.
|
||||
</Text>
|
||||
</VStack>
|
||||
)}
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
export default DatasetsTable;
|
||||
107 app/src/components/datasets/DeleteButton.tsx (Normal file)
@@ -0,0 +1,107 @@
|
||||
import {
|
||||
Modal,
|
||||
ModalOverlay,
|
||||
ModalContent,
|
||||
ModalHeader,
|
||||
ModalCloseButton,
|
||||
ModalBody,
|
||||
ModalFooter,
|
||||
HStack,
|
||||
VStack,
|
||||
Icon,
|
||||
Text,
|
||||
Button,
|
||||
useDisclosure,
|
||||
type UseDisclosureReturn,
|
||||
} from "@chakra-ui/react";
|
||||
import { BsTrash } from "react-icons/bs";
|
||||
|
||||
import { useHandledAsyncCallback, useDataset } from "~/utils/hooks";
|
||||
import { api } from "~/utils/api";
|
||||
import { useAppStore } from "~/state/store";
|
||||
import ActionButton from "../ActionButton";
|
||||
import { maybeReportError } from "~/utils/errorHandling/maybeReportError";
|
||||
import pluralize from "pluralize";
|
||||
|
||||
const DeleteButton = () => {
|
||||
const selectedIds = useAppStore((s) => s.selectedDatasetEntries.selectedIds);
|
||||
|
||||
const disclosure = useDisclosure();
|
||||
|
||||
return (
|
||||
<>
|
||||
<ActionButton
|
||||
onClick={disclosure.onOpen}
|
||||
label="Delete"
|
||||
icon={BsTrash}
|
||||
isDisabled={selectedIds.size === 0}
|
||||
requireBeta
|
||||
/>
|
||||
<DeleteDatasetEntriesModal disclosure={disclosure} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default DeleteButton;
|
||||
|
||||
const DeleteDatasetEntriesModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
|
||||
const dataset = useDataset().data;
|
||||
const selectedIds = useAppStore((s) => s.selectedDatasetEntries.selectedIds);
|
||||
const clearSelectedIds = useAppStore((s) => s.selectedDatasetEntries.clearSelectedIds);
|
||||
|
||||
const deleteRowsMutation = api.datasetEntries.delete.useMutation();
|
||||
|
||||
const utils = api.useContext();
|
||||
|
||||
const [deleteRows, deletionInProgress] = useHandledAsyncCallback(async () => {
|
||||
if (!dataset?.id || !selectedIds.size) return;
|
||||
|
||||
// divide selectedIds into chunks of 15000 to reduce request size
|
||||
const chunkSize = 15000;
|
||||
const idsArray = Array.from(selectedIds);
|
||||
for (let i = 0; i < idsArray.length; i += chunkSize) {
|
||||
const response = await deleteRowsMutation.mutateAsync({
|
||||
ids: idsArray.slice(i, i + chunkSize),
|
||||
});
|
||||
|
||||
if (maybeReportError(response)) return;
|
||||
}
|
||||
|
||||
await utils.datasetEntries.list.invalidate();
|
||||
disclosure.onClose();
|
||||
clearSelectedIds();
|
||||
}, [deleteRowsMutation, dataset, selectedIds, utils]);
|
||||
|
||||
return (
|
||||
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
|
||||
<ModalOverlay />
|
||||
<ModalContent w={1200}>
|
||||
<ModalHeader>
|
||||
<HStack>
|
||||
<Icon as={BsTrash} />
|
||||
<Text>Delete Logs</Text>
|
||||
</HStack>
|
||||
</ModalHeader>
|
||||
<ModalCloseButton />
|
||||
<ModalBody maxW="unset">
|
||||
<VStack w="full" spacing={8} pt={4} alignItems="flex-start">
|
||||
<Text>
|
||||
Are you sure you want to delete the <b>{selectedIds.size}</b>{" "}
|
||||
{pluralize("row", selectedIds.size)} you've selected?
|
||||
</Text>
|
||||
</VStack>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<HStack>
|
||||
<Button colorScheme="gray" onClick={disclosure.onClose} minW={24}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button colorScheme="red" onClick={deleteRows} isLoading={deletionInProgress} minW={24}>
|
||||
Delete
|
||||
</Button>
|
||||
</HStack>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
182 app/src/components/datasets/DownloadButton.tsx (Normal file)
@@ -0,0 +1,182 @@
|
||||
import { useState, useEffect } from "react";
|
||||
import {
|
||||
Modal,
|
||||
ModalOverlay,
|
||||
ModalContent,
|
||||
ModalHeader,
|
||||
ModalCloseButton,
|
||||
ModalBody,
|
||||
ModalFooter,
|
||||
HStack,
|
||||
VStack,
|
||||
Icon,
|
||||
Text,
|
||||
Button,
|
||||
Checkbox,
|
||||
NumberInput,
|
||||
NumberInputField,
|
||||
NumberInputStepper,
|
||||
NumberIncrementStepper,
|
||||
NumberDecrementStepper,
|
||||
Collapse,
|
||||
Flex,
|
||||
useDisclosure,
|
||||
type UseDisclosureReturn,
|
||||
} from "@chakra-ui/react";
|
||||
import { AiOutlineDownload } from "react-icons/ai";
|
||||
|
||||
import { useHandledAsyncCallback, useDataset } from "~/utils/hooks";
|
||||
import { api } from "~/utils/api";
|
||||
import { useAppStore } from "~/state/store";
|
||||
import ActionButton from "../ActionButton";
|
||||
import { FiChevronUp, FiChevronDown } from "react-icons/fi";
|
||||
import InfoCircle from "../InfoCircle";
|
||||
|
||||
const ExportButton = () => {
|
||||
const selectedIds = useAppStore((s) => s.selectedDatasetEntries.selectedIds);
|
||||
|
||||
const disclosure = useDisclosure();
|
||||
|
||||
return (
|
||||
<>
|
||||
<ActionButton
|
||||
onClick={disclosure.onOpen}
|
||||
label="Download"
|
||||
icon={AiOutlineDownload}
|
||||
isDisabled={selectedIds.size === 0}
|
||||
requireBeta
|
||||
/>
|
||||
<ExportDatasetEntriesModal disclosure={disclosure} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default ExportButton;
|
||||
|
||||
const ExportDatasetEntriesModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
|
||||
const dataset = useDataset().data;
|
||||
const selectedIds = useAppStore((s) => s.selectedDatasetEntries.selectedIds);
|
||||
const clearSelectedIds = useAppStore((s) => s.selectedDatasetEntries.clearSelectedIds);
|
||||
|
||||
const [testingSplit, setTestingSplit] = useState(10);
|
||||
const [removeDuplicates, setRemoveDuplicates] = useState(false);
|
||||
const [showAdvancedOptions, setShowAdvancedOptions] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
if (disclosure.isOpen) {
|
||||
setTestingSplit(10);
|
||||
setRemoveDuplicates(false);
|
||||
}
|
||||
}, [disclosure.isOpen]);
|
||||
|
||||
const exportDataMutation = api.datasetEntries.export.useMutation();
|
||||
|
||||
const [exportData, exportInProgress] = useHandledAsyncCallback(async () => {
|
||||
if (!dataset?.id || !selectedIds.size || !testingSplit) return;
|
||||
const response = await exportDataMutation.mutateAsync({
|
||||
datasetId: dataset.id,
|
||||
datasetEntryIds: Array.from(selectedIds),
|
||||
testingSplit,
|
||||
removeDuplicates,
|
||||
});
|
||||
|
||||
const dataUrl = `data:application/pdf;base64,${response}`;
|
||||
const blob = await fetch(dataUrl).then((res) => res.blob());
|
||||
const url = URL.createObjectURL(blob);
|
||||
const a = document.createElement("a");
|
||||
|
||||
a.href = url;
|
||||
a.download = `data.zip`;
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
document.body.removeChild(a);
|
||||
|
||||
disclosure.onClose();
|
||||
clearSelectedIds();
|
||||
}, [exportDataMutation, dataset, selectedIds, testingSplit, removeDuplicates]);
|
||||
|
||||
return (
|
||||
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
|
||||
<ModalOverlay />
|
||||
<ModalContent w={1200}>
|
||||
<ModalHeader>
|
||||
<HStack>
|
||||
<Icon as={AiOutlineDownload} />
|
||||
<Text>Export Logs</Text>
|
||||
</HStack>
|
||||
</ModalHeader>
|
||||
<ModalCloseButton />
|
||||
<ModalBody maxW="unset">
|
||||
<VStack w="full" spacing={8} pt={4} alignItems="flex-start">
|
||||
<Text>
|
||||
We'll export the <b>{selectedIds.size}</b> rows you have selected in the OpenAI
|
||||
training format.
|
||||
</Text>
|
||||
<VStack alignItems="flex-start" spacing={4}>
|
||||
<Flex
|
||||
flexDir={{ base: "column", md: "row" }}
|
||||
alignItems={{ base: "flex-start", md: "center" }}
|
||||
>
|
||||
<HStack w={48} alignItems="center" spacing={1}>
|
||||
<Text fontWeight="bold">Testing Split:</Text>
|
||||
<InfoCircle tooltipText="The percent of your logs that will be reserved for testing and saved in another file. Logs are split randomly." />
|
||||
</HStack>
|
||||
<HStack>
|
||||
<NumberInput
|
||||
defaultValue={10}
|
||||
onChange={(_, num) => setTestingSplit(num)}
|
||||
min={1}
|
||||
max={100}
|
||||
w={48}
|
||||
>
|
||||
<NumberInputField />
|
||||
<NumberInputStepper>
|
||||
<NumberIncrementStepper />
|
||||
<NumberDecrementStepper />
|
||||
</NumberInputStepper>
|
||||
</NumberInput>
|
||||
</HStack>
|
||||
</Flex>
|
||||
</VStack>
|
||||
<VStack alignItems="flex-start" spacing={0}>
|
||||
<Button
|
||||
variant="unstyled"
|
||||
color="blue.600"
|
||||
onClick={() => setShowAdvancedOptions(!showAdvancedOptions)}
|
||||
>
|
||||
<HStack>
|
||||
<Text>Advanced Options</Text>
|
||||
<Icon as={showAdvancedOptions ? FiChevronUp : FiChevronDown} />
|
||||
</HStack>
|
||||
</Button>
|
||||
<Collapse in={showAdvancedOptions} unmountOnExit={true}>
|
||||
<VStack align="stretch" pt={4}>
|
||||
<HStack>
|
||||
<Checkbox
|
||||
colorScheme="blue"
|
||||
isChecked={removeDuplicates}
|
||||
onChange={(e) => setRemoveDuplicates(e.target.checked)}
|
||||
>
|
||||
<Text>Remove duplicates</Text>
|
||||
</Checkbox>
|
||||
<InfoCircle tooltipText="To avoid overfitting and speed up training, automatically deduplicate logs with matching input and output." />
|
||||
</HStack>
|
||||
</VStack>
|
||||
</Collapse>
|
||||
</VStack>
|
||||
</VStack>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<HStack>
|
||||
<Button colorScheme="gray" onClick={disclosure.onClose} minW={24}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button colorScheme="blue" onClick={exportData} isLoading={exportInProgress} minW={24}>
|
||||
Download
|
||||
</Button>
|
||||
</HStack>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
21 app/src/components/datasets/ExperimentButton.tsx (Normal file)

@@ -0,0 +1,21 @@
import { RiFlaskLine } from "react-icons/ri";

import { useAppStore } from "~/state/store";
import ActionButton from "../ActionButton";

const ExperimentButton = () => {
  const selectedIds = useAppStore((s) => s.selectedDatasetEntries.selectedIds);
  return (
    <ActionButton
      onClick={() => {
        console.log("experimenting with these ids", selectedIds);
      }}
      label="Experiment"
      icon={RiFlaskLine}
      isDisabled={selectedIds.size === 0}
      requireBeta
    />
  );
};

export default ExperimentButton;
148 app/src/components/datasets/FileUploadsCard.tsx (Normal file)
@@ -0,0 +1,148 @@
|
||||
import { useState, useEffect } from "react";
|
||||
import { VStack, HStack, Button, Text, Progress, IconButton, Portal } from "@chakra-ui/react";
|
||||
import { BsX } from "react-icons/bs";
|
||||
|
||||
import { type RouterOutputs, api } from "~/utils/api";
|
||||
import { useDataset, useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import { formatFileSize } from "~/utils/utils";
|
||||
|
||||
type FileUpload = RouterOutputs["datasets"]["listFileUploads"][0];
|
||||
|
||||
const FileUploadsCard = () => {
|
||||
const dataset = useDataset();
|
||||
const [fileUploadsRefetchInterval, setFileUploadsRefetchInterval] = useState<number>(500);
|
||||
const fileUploads = api.datasets.listFileUploads.useQuery(
|
||||
{ datasetId: dataset.data?.id as string },
|
||||
{ enabled: !!dataset.data?.id, refetchInterval: fileUploadsRefetchInterval },
|
||||
);
|
||||
useEffect(() => {
|
||||
if (fileUploads?.data?.some((fu) => fu.status !== "COMPLETE" && fu.status !== "ERROR")) {
|
||||
setFileUploadsRefetchInterval(500);
|
||||
} else {
|
||||
setFileUploadsRefetchInterval(15000);
|
||||
}
|
||||
}, [fileUploads]);
|
||||
|
||||
const utils = api.useContext();
|
||||
|
||||
const hideFileUploadsMutation = api.datasets.hideFileUploads.useMutation();
const [hideAllFileUploads] = useHandledAsyncCallback(async () => {
if (!fileUploads.data?.length) return;
await hideFileUploadsMutation.mutateAsync({
fileUploadIds: fileUploads.data.map((upload) => upload.id),
});
await utils.datasets.listFileUploads.invalidate();
}, [hideFileUploadsMutation, fileUploads.data, utils]);

if (!fileUploads.data?.length) return null;

return (
<Portal>
<VStack
w={72}
borderRadius={8}
position="fixed"
bottom={8}
right={8}
overflow="hidden"
borderWidth={1}
boxShadow="0 0 40px 4px rgba(0, 0, 0, 0.1);"
minW={0}
bgColor="white"
>
<HStack p={4} w="full" bgColor="gray.200" justifyContent="space-between">
<Text fontWeight="bold">Uploads</Text>
<IconButton
aria-label="Close uploads"
as={BsX}
boxSize={6}
minW={0}
variant="ghost"
onClick={hideAllFileUploads}
cursor="pointer"
/>
</HStack>
{fileUploads?.data?.map((upload) => <FileUploadRow key={upload.id} fileUpload={upload} />)}
</VStack>
</Portal>
);
};

export default FileUploadsCard;

const FileUploadRow = ({ fileUpload }: { fileUpload: FileUpload }) => {
const { id, fileName, fileSize, progress, status, errorMessage } = fileUpload;

const utils = api.useContext();

const hideFileUploadsMutation = api.datasets.hideFileUploads.useMutation();
const [hideFileUpload, hidingInProgress] = useHandledAsyncCallback(async () => {
await hideFileUploadsMutation.mutateAsync({ fileUploadIds: [id] });
}, [id, hideFileUploadsMutation, utils]);

const [refreshDatasetEntries] = useHandledAsyncCallback(async () => {
await hideFileUploadsMutation.mutateAsync({ fileUploadIds: [id] });
await utils.datasets.listFileUploads.invalidate();
await utils.datasetEntries.list.invalidate();
}, [id, hideFileUploadsMutation, utils]);

return (
<VStack w="full" alignItems="flex-start" p={4} borderBottomWidth={1}>
<HStack w="full" justifyContent="space-between" alignItems="flex-start">
<VStack alignItems="flex-start" spacing={0}>
<Text fontWeight="bold">{fileName}</Text>
<Text fontSize="xs">({formatFileSize(fileSize, 2)})</Text>
</VStack>

<HStack spacing={0}>
{status === "COMPLETE" ? (
<Button variant="ghost" onClick={refreshDatasetEntries} color="orange.400" size="xs">
Refresh Table
</Button>
) : (
<IconButton
aria-label="Hide file upload"
as={BsX}
boxSize={6}
minW={0}
variant="ghost"
isLoading={hidingInProgress}
onClick={hideFileUpload}
cursor="pointer"
/>
)}
</HStack>
</HStack>

{errorMessage ? (
<Text alignSelf="center" pt={2}>
{errorMessage}
</Text>
) : (
<>
<Text alignSelf="center" fontSize="xs">
{getStatusText(status)}
</Text>
<Progress w="full" value={progress} borderRadius={2} />
</>
)}
</VStack>
);
};

const getStatusText = (status: FileUpload["status"]) => {
switch (status) {
case "PENDING":
return "Pending";
case "DOWNLOADING":
return "Downloading to Server";
case "PROCESSING":
return "Processing";
case "SAVING":
return "Saving";
case "COMPLETE":
return "Complete";
case "ERROR":
return "Error";
}
};
@@ -20,17 +20,18 @@ import { AiTwotoneThunderbolt } from "react-icons/ai";
import humanId from "human-id";
import { useRouter } from "next/router";

import { useHandledAsyncCallback } from "~/utils/hooks";
import { useDataset, useDatasetEntries, useHandledAsyncCallback } from "~/utils/hooks";
import { api } from "~/utils/api";
import { useAppStore } from "~/state/store";
import ActionButton from "./ActionButton";
import ActionButton from "../ActionButton";
import InputDropdown from "../InputDropdown";
import { FiChevronDown } from "react-icons/fi";
// import { FiChevronDown } from "react-icons/fi";

const SUPPORTED_BASE_MODELS = ["llama2-7b", "llama2-13b", "llama2-70b", "gpt-3.5-turbo"];

const FineTuneButton = () => {
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
const datasetEntries = useDatasetEntries().data;

const numEntries = datasetEntries?.matchingEntryIds.length || 0;

const disclosure = useDisclosure();

@@ -40,7 +41,7 @@ const FineTuneButton = () => {
onClick={disclosure.onOpen}
label="Fine Tune"
icon={AiTwotoneThunderbolt}
isDisabled={selectedLogIds.size === 0}
isDisabled={numEntries === 0}
requireBeta
/>
<FineTuneModal disclosure={disclosure} />
@@ -51,9 +52,8 @@ const FineTuneButton = () => {
export default FineTuneButton;

const FineTuneModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
const selectedProjectId = useAppStore((s) => s.selectedProjectId);
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
const clearSelectedLogIds = useAppStore((s) => s.selectedLogs.clearSelectedLogIds);
const dataset = useDataset().data;
const datasetEntries = useDatasetEntries().data;

const [selectedBaseModel, setSelectedBaseModel] = useState(SUPPORTED_BASE_MODELS[0]);
const [modelSlug, setModelSlug] = useState(humanId({ separator: "-", capitalize: false }));
@@ -71,19 +71,17 @@ const FineTuneModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
const createFineTuneMutation = api.fineTunes.create.useMutation();

const [createFineTune, creationInProgress] = useHandledAsyncCallback(async () => {
if (!selectedProjectId || !modelSlug || !selectedBaseModel || !selectedLogIds.size) return;
if (!modelSlug || !selectedBaseModel || !dataset) return;
await createFineTuneMutation.mutateAsync({
projectId: selectedProjectId,
slug: modelSlug,
baseModel: selectedBaseModel,
selectedLogIds: Array.from(selectedLogIds),
datasetId: dataset.id,
});

await utils.fineTunes.list.invalidate();
await router.push({ pathname: "/fine-tunes" });
clearSelectedLogIds();
disclosure.onClose();
}, [createFineTuneMutation, selectedProjectId, selectedLogIds, modelSlug, selectedBaseModel]);
}, [createFineTuneMutation, modelSlug, selectedBaseModel]);

return (
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
@@ -99,7 +97,8 @@ const FineTuneModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
<ModalBody maxW="unset">
<VStack w="full" spacing={8} pt={4} alignItems="flex-start">
<Text>
We'll train on the <b>{selectedLogIds.size}</b> logs you've selected.
We'll train on <b>{datasetEntries?.trainingCount}</b> and test on{" "}
<b>{datasetEntries?.testingCount}</b> entries in this dataset.
</Text>
<VStack>
<HStack spacing={2} w="full">
@@ -132,12 +131,12 @@ const FineTuneModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
/>
</HStack>
</VStack>
<Button variant="unstyled" color="blue.600">
{/* <Button variant="unstyled" color="blue.600">
<HStack>
<Text>Advanced Options</Text>
<Icon as={FiChevronDown} />
</HStack>
</Button>
</Button> */}
</VStack>
</ModalBody>
<ModalFooter>
276 app/src/components/datasets/UploadDataButton.tsx Normal file
@@ -0,0 +1,276 @@
|
||||
import { useState, useEffect, useRef, useCallback } from "react";
|
||||
import {
|
||||
Modal,
|
||||
ModalOverlay,
|
||||
ModalContent,
|
||||
ModalHeader,
|
||||
ModalCloseButton,
|
||||
ModalBody,
|
||||
ModalFooter,
|
||||
HStack,
|
||||
VStack,
|
||||
Icon,
|
||||
Text,
|
||||
Button,
|
||||
Box,
|
||||
useDisclosure,
|
||||
type UseDisclosureReturn,
|
||||
} from "@chakra-ui/react";
|
||||
import pluralize from "pluralize";
|
||||
import { AiOutlineCloudUpload, AiOutlineFile } from "react-icons/ai";
|
||||
|
||||
import { useDataset, useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import { api } from "~/utils/api";
|
||||
import ActionButton from "../ActionButton";
|
||||
import { validateTrainingRows, type TrainingRow, parseJSONL } from "./validateTrainingRows";
|
||||
import { uploadDatasetEntryFile } from "~/utils/azure/website";
|
||||
import { formatFileSize } from "~/utils/utils";
|
||||
|
||||
const UploadDataButton = () => {
|
||||
const disclosure = useDisclosure();
|
||||
|
||||
return (
|
||||
<>
|
||||
<ActionButton
|
||||
onClick={disclosure.onOpen}
|
||||
label="Upload Data"
|
||||
icon={AiOutlineCloudUpload}
|
||||
iconBoxSize={4}
|
||||
requireBeta
|
||||
/>
|
||||
<UploadDataModal disclosure={disclosure} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default UploadDataButton;
|
||||
|
||||
const UploadDataModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
|
||||
const dataset = useDataset().data;
|
||||
|
||||
const [validationError, setValidationError] = useState<string | null>(null);
|
||||
const [trainingRows, setTrainingRows] = useState<TrainingRow[] | null>(null);
|
||||
const [file, setFile] = useState<File | null>(null);
|
||||
|
||||
const fileInputRef = useRef<HTMLInputElement>(null);
|
||||
|
||||
const handleFileDrop = (e: React.DragEvent<HTMLDivElement>) => {
|
||||
e.preventDefault();
|
||||
const files = e.dataTransfer.files;
|
||||
if (files.length > 0) {
|
||||
processFile(files[0] as File);
|
||||
}
|
||||
};
|
||||
|
||||
const handleFileChange = (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
const files = e.target.files;
|
||||
if (files && files.length > 0) {
|
||||
processFile(files[0] as File);
|
||||
}
|
||||
};
|
||||
|
||||
const processFile = (file: File) => {
|
||||
setFile(file);
|
||||
|
||||
// skip reading if file is larger than 10MB
|
||||
if (file.size > 10000000) {
|
||||
setTrainingRows(null);
|
||||
return;
|
||||
}
|
||||
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e: ProgressEvent<FileReader>) => {
|
||||
const content = e.target?.result as string;
|
||||
// Process the content, e.g., set to state
|
||||
let parsedJSONL;
|
||||
try {
|
||||
parsedJSONL = parseJSONL(content) as TrainingRow[];
|
||||
const validationError = validateTrainingRows(parsedJSONL);
|
||||
if (validationError) {
|
||||
setValidationError(validationError);
|
||||
setTrainingRows(null);
|
||||
return;
|
||||
}
|
||||
setTrainingRows(parsedJSONL);
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} catch (e: any) {
|
||||
setValidationError("Unable to parse JSONL file: " + (e.message as string));
|
||||
setTrainingRows(null);
|
||||
return;
|
||||
}
|
||||
};
|
||||
reader.readAsText(file);
|
||||
};
|
||||
|
||||
const resetState = useCallback(() => {
|
||||
setValidationError(null);
|
||||
setTrainingRows(null);
|
||||
setFile(null);
|
||||
}, [setValidationError, setTrainingRows, setFile]);
|
||||
|
||||
useEffect(() => {
|
||||
if (disclosure.isOpen) {
|
||||
resetState();
|
||||
}
|
||||
}, [disclosure.isOpen, resetState]);
|
||||
|
||||
const triggerFileDownloadMutation = api.datasets.triggerFileDownload.useMutation();
|
||||
|
||||
const utils = api.useContext();
|
||||
|
||||
const [sendJSONL, sendingInProgress] = useHandledAsyncCallback(async () => {
|
||||
if (!dataset || !file) return;
|
||||
|
||||
const blobName = await uploadDatasetEntryFile(file);
|
||||
|
||||
await triggerFileDownloadMutation.mutateAsync({
|
||||
datasetId: dataset.id,
|
||||
blobName,
|
||||
fileName: file.name,
|
||||
fileSize: file.size,
|
||||
});
|
||||
|
||||
await utils.datasets.listFileUploads.invalidate();
|
||||
|
||||
disclosure.onClose();
|
||||
}, [dataset, trainingRows, triggerFileDownloadMutation, file, utils]);
|
||||
|
||||
return (
|
||||
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
|
||||
<ModalOverlay />
|
||||
<ModalContent w={1200}>
|
||||
<ModalHeader>
|
||||
<HStack>
|
||||
<Text>Upload Training Logs</Text>
|
||||
</HStack>
|
||||
</ModalHeader>
|
||||
<ModalCloseButton />
|
||||
<ModalBody maxW="unset" p={8}>
|
||||
<Box w="full" aspectRatio={1.5}>
|
||||
{validationError && (
|
||||
<VStack w="full" h="full" justifyContent="center" spacing={8}>
|
||||
<Icon as={AiOutlineFile} boxSize={24} color="gray.300" />
|
||||
<VStack w="full">
|
||||
<Text fontSize={32} color="gray.500" fontWeight="bold">
|
||||
Error
|
||||
</Text>
|
||||
<Text color="gray.500">{validationError}</Text>
|
||||
</VStack>
|
||||
<Text
|
||||
as="span"
|
||||
textDecor="underline"
|
||||
color="gray.500"
|
||||
_hover={{ color: "orange.400" }}
|
||||
cursor="pointer"
|
||||
onClick={resetState}
|
||||
>
|
||||
Try again
|
||||
</Text>
|
||||
</VStack>
|
||||
)}
|
||||
{!validationError && !file && (
|
||||
<VStack
|
||||
w="full"
|
||||
h="full"
|
||||
stroke="gray.300"
|
||||
justifyContent="center"
|
||||
borderRadius={8}
|
||||
sx={{
|
||||
"background-image": `url("data:image/svg+xml,%3csvg width='100%25' height='100%25' xmlns='http://www.w3.org/2000/svg'%3e%3crect x='2%25' y='2%25' width='96%25' height='96%25' fill='none' stroke='%23eee' stroke-width='4' stroke-dasharray='6%2c 14' stroke-dashoffset='0' stroke-linecap='square' rx='8' ry='8'/%3e%3c/svg%3e")`,
|
||||
}}
|
||||
onDragOver={(e) => e.preventDefault()}
|
||||
onDrop={handleFileDrop}
|
||||
>
|
||||
<JsonFileIcon />
|
||||
<Icon as={AiOutlineCloudUpload} boxSize={24} color="gray.300" />
|
||||
|
||||
<Text fontSize={32} color="gray.500" fontWeight="bold">
|
||||
Drag & Drop
|
||||
</Text>
|
||||
<Text color="gray.500">
|
||||
your .jsonl file here, or{" "}
|
||||
<input
|
||||
type="file"
|
||||
ref={fileInputRef}
|
||||
onChange={handleFileChange}
|
||||
style={{ display: "none" }}
|
||||
accept=".jsonl"
|
||||
/>
|
||||
<Text
|
||||
as="span"
|
||||
textDecor="underline"
|
||||
_hover={{ color: "orange.400" }}
|
||||
cursor="pointer"
|
||||
onClick={() => fileInputRef.current?.click()}
|
||||
>
|
||||
browse
|
||||
</Text>
|
||||
</Text>
|
||||
</VStack>
|
||||
)}
|
||||
{!validationError && file && (
|
||||
<VStack w="full" h="full" justifyContent="center" spacing={8}>
|
||||
<JsonFileIcon />
|
||||
<VStack w="full">
|
||||
{trainingRows ? (
|
||||
<>
|
||||
<Text fontSize={32} color="gray.500" fontWeight="bold">
|
||||
Success
|
||||
</Text>
|
||||
<Text color="gray.500">
|
||||
We'll upload <b>{trainingRows.length}</b>{" "}
|
||||
{pluralize("row", trainingRows.length)} into <b>{dataset?.name}</b>.{" "}
|
||||
</Text>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Text fontSize={32} color="gray.500" fontWeight="bold">
|
||||
{file.name}
|
||||
</Text>
|
||||
<Text color="gray.500">{formatFileSize(file.size)}</Text>
|
||||
</>
|
||||
)}
|
||||
</VStack>
|
||||
<Text
|
||||
as="span"
|
||||
textDecor="underline"
|
||||
color="gray.500"
|
||||
_hover={{ color: "orange.400" }}
|
||||
cursor="pointer"
|
||||
onClick={resetState}
|
||||
>
|
||||
Change file
|
||||
</Text>
|
||||
</VStack>
|
||||
)}
|
||||
</Box>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<HStack>
|
||||
<Button colorScheme="gray" onClick={disclosure.onClose} minW={24}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
colorScheme="orange"
|
||||
onClick={sendJSONL}
|
||||
isLoading={sendingInProgress}
|
||||
minW={24}
|
||||
isDisabled={!file || !!validationError}
|
||||
>
|
||||
Upload
|
||||
</Button>
|
||||
</HStack>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
|
||||
const JsonFileIcon = () => (
|
||||
<Box position="relative" display="flex" alignItems="center" justifyContent="center">
|
||||
<Icon as={AiOutlineFile} boxSize={24} color="gray.300" />
|
||||
<Text position="absolute" color="orange.400" fontWeight="bold" fontSize={12} pt={4}>
|
||||
JSONL
|
||||
</Text>
|
||||
</Box>
|
||||
);
|
||||
71 app/src/components/datasets/validateTrainingRows.ts Normal file
@@ -0,0 +1,71 @@
import { type CreateChatCompletionRequestMessage } from "openai/resources/chat";

export type TrainingRow = {
input: CreateChatCompletionRequestMessage[];
output?: CreateChatCompletionRequestMessage;
};

export const parseJSONL = (jsonlString: string): unknown[] => {
const lines = jsonlString.trim().split("\n");

let lineNumber = 0;
const parsedLines = [];

try {
for (const line of lines) {
lineNumber++;
parsedLines.push(JSON.parse(line));
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} catch (e: any) {
throw new Error(`Error parsing line ${lineNumber}: ${e.message as string}`);
}
return parsedLines;
};

export const validateTrainingRows = (rows: unknown): string | null => {
if (!Array.isArray(rows)) return "training data is not an array";
for (let i = 0; i < rows.length; i++) {
const row = rows[i] as TrainingRow;
let errorMessage: string | null = null;
try {
errorMessage = validateTrainingRow(row);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} catch (error: any) {
errorMessage = error.message;
}
if (errorMessage) return `row ${i + 1}: ${errorMessage}`;
}

return null;
};

const validateTrainingRow = (row: TrainingRow): string | null => {
if (!row) return "empty row";
if (!row.input) return "missing input";

// Validate input
if (!Array.isArray(row.input)) return "input is not an array";
if ((row.input as unknown[]).some((x) => typeof x !== "object"))
return "input contains invalid item";
if (row.input.some((x) => !x)) return "input contains empty item";
if (row.input.some((x) => !x.content && !x.function_call))
return "input contains item with no content or function_call";
if (row.input.some((x) => x.function_call && !x.function_call.arguments))
return "input contains item with function_call but no arguments";
if (row.input.some((x) => x.function_call && !x.function_call.name))
return "input contains item with function_call but no name";

// Validate output
if (row.output) {
if (typeof row.output !== "object") return "output is not an object";
if (!row.output.content && !row.output.function_call)
return "output contains no content or function_call";
if (row.output.function_call && !row.output.function_call.arguments)
return "output contains function_call but no arguments";
if (row.output.function_call && !row.output.function_call.name)
return "output contains function_call but no name";
}

return null;
};
@@ -27,7 +27,7 @@ const DeleteExperimentDialog = ({
const mutation = api.experiments.delete.useMutation();
const utils = api.useContext();

const [onDeleteConfirm] = useHandledAsyncCallback(async () => {
const [onDeleteConfirm, deletionInProgress] = useHandledAsyncCallback(async () => {
if (!experimentId) return;
await mutation.mutateAsync({ id: experimentId });
await utils.experiments.list.invalidate();
@@ -53,7 +53,12 @@ const DeleteExperimentDialog = ({
<Button ref={cancelRef} onClick={disclosure.onClose}>
Cancel
</Button>
<Button colorScheme="red" onClick={onDeleteConfirm} ml={3}>
<Button
colorScheme="red"
isLoading={deletionInProgress}
onClick={onDeleteConfirm}
ml={3}
>
Delete
</Button>
</AlertDialogFooter>

@@ -1,57 +0,0 @@
|
||||
import {
|
||||
Button,
|
||||
AlertDialog,
|
||||
AlertDialogBody,
|
||||
AlertDialogFooter,
|
||||
AlertDialogHeader,
|
||||
AlertDialogContent,
|
||||
AlertDialogOverlay,
|
||||
} from "@chakra-ui/react";
|
||||
|
||||
import { useRouter } from "next/router";
|
||||
import { useRef } from "react";
|
||||
import { api } from "~/utils/api";
|
||||
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
|
||||
|
||||
export const DeleteDialog = ({ onClose }: { onClose: () => void }) => {
|
||||
const experiment = useExperiment();
|
||||
const deleteMutation = api.experiments.delete.useMutation();
|
||||
const utils = api.useContext();
|
||||
const router = useRouter();
|
||||
|
||||
const cancelRef = useRef<HTMLButtonElement>(null);
|
||||
|
||||
const [onDeleteConfirm] = useHandledAsyncCallback(async () => {
|
||||
if (!experiment.data?.id) return;
|
||||
await deleteMutation.mutateAsync({ id: experiment.data.id });
|
||||
await utils.experiments.list.invalidate();
|
||||
await router.push({ pathname: "/experiments" });
|
||||
onClose();
|
||||
}, [deleteMutation, experiment.data?.id, router]);
|
||||
|
||||
return (
|
||||
<AlertDialog isOpen leastDestructiveRef={cancelRef} onClose={onClose}>
|
||||
<AlertDialogOverlay>
|
||||
<AlertDialogContent>
|
||||
<AlertDialogHeader fontSize="lg" fontWeight="bold">
|
||||
Delete Experiment
|
||||
</AlertDialogHeader>
|
||||
|
||||
<AlertDialogBody>
|
||||
If you delete this experiment all the associated prompts and scenarios will be deleted
|
||||
as well. Are you sure?
|
||||
</AlertDialogBody>
|
||||
|
||||
<AlertDialogFooter>
|
||||
<Button ref={cancelRef} onClick={onClose}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button colorScheme="red" onClick={onDeleteConfirm} ml={3}>
|
||||
Delete
|
||||
</Button>
|
||||
</AlertDialogFooter>
|
||||
</AlertDialogContent>
|
||||
</AlertDialogOverlay>
|
||||
</AlertDialog>
|
||||
);
|
||||
};
|
||||
@@ -3,17 +3,14 @@ import { useOnForkButtonPressed } from "./useOnForkButtonPressed";
import { useExperiment } from "~/utils/hooks";
import { BsGearFill } from "react-icons/bs";
import { TbGitFork } from "react-icons/tb";
import { useAppStore } from "~/state/store";

export const ExperimentHeaderButtons = () => {
export const ExperimentHeaderButtons = ({ openDrawer }: { openDrawer: () => void }) => {
const experiment = useExperiment();

const canModify = experiment.data?.access.canModify ?? false;

const { onForkButtonPressed, isForking } = useOnForkButtonPressed();

const openDrawer = useAppStore((s) => s.openDrawer);

if (experiment.isLoading) return null;

return (

@@ -2,17 +2,15 @@ import { Button, Icon, useDisclosure, Text } from "@chakra-ui/react";
import { useRouter } from "next/router";
import { BsTrash } from "react-icons/bs";

import { useAppStore } from "~/state/store";
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
import DeleteExperimentDialog from "../experiments/DeleteExperimentDialog";
import DeleteExperimentDialog from "../DeleteExperimentDialog";

export const DeleteButton = () => {
export const DeleteButton = ({ closeDrawer }: { closeDrawer: () => void }) => {
const experiment = useExperiment();
const router = useRouter();

const disclosure = useDisclosure();

const closeDrawer = useAppStore((s) => s.closeDrawer);
const [onDelete] = useHandledAsyncCallback(async () => {
await router.push({ pathname: "/experiments" });
closeDrawer();
@@ -19,7 +19,7 @@ import { useCallback, useState } from "react";
import { BsPencil, BsX } from "react-icons/bs";
import { api } from "~/utils/api";
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
import AutoResizeTextArea from "../AutoResizeTextArea";
import AutoResizeTextArea from "~/components/AutoResizeTextArea";

type EvalValues = Pick<Evaluation, "label" | "value" | "evalType">;

@@ -5,7 +5,7 @@ import { BsPencil, BsX } from "react-icons/bs";
import { api } from "~/utils/api";
import { useExperiment, useHandledAsyncCallback, useScenarioVars } from "~/utils/hooks";
import { maybeReportError } from "~/utils/errorHandling/maybeReportError";
import { FloatingLabelInput } from "./FloatingLabelInput";
import { FloatingLabelInput } from "~/components/OutputsTable/FloatingLabelInput";

export const ScenarioVar = ({
variable,
@@ -7,18 +7,19 @@ import {
DrawerOverlay,
Heading,
VStack,
type UseDisclosureReturn,
} from "@chakra-ui/react";
import EditScenarioVars from "../OutputsTable/EditScenarioVars";
import EditEvaluations from "../OutputsTable/EditEvaluations";
import { useAppStore } from "~/state/store";
import EditScenarioVars from "./EditScenarioVars";
import EditEvaluations from "./EditEvaluations";
import { DeleteButton } from "./DeleteButton";

export default function ExperimentSettingsDrawer() {
const isOpen = useAppStore((state) => state.drawerOpen);
const closeDrawer = useAppStore((state) => state.closeDrawer);

export default function ExperimentSettingsDrawer({
disclosure,
}: {
disclosure: UseDisclosureReturn;
}) {
return (
<Drawer isOpen={isOpen} placement="right" onClose={closeDrawer} size="md">
<Drawer placement="right" size="md" {...disclosure}>
<DrawerOverlay />
<DrawerContent>
<DrawerCloseButton />
@@ -31,7 +32,7 @@ export default function ExperimentSettingsDrawer() {
<EditScenarioVars />
<EditEvaluations />
</VStack>
<DeleteButton />
<DeleteButton closeDrawer={disclosure.onClose} />
</VStack>
</DrawerBody>
</DrawerContent>
@@ -17,7 +17,7 @@ import { useRouter } from "next/router";
import { BsGearFill, BsGithub, BsPersonCircle } from "react-icons/bs";
import { IoStatsChartOutline } from "react-icons/io5";
import { RiHome3Line, RiFlaskLine } from "react-icons/ri";
import { AiOutlineThunderbolt } from "react-icons/ai";
import { AiOutlineThunderbolt, AiOutlineDatabase } from "react-icons/ai";
import { FaReadme } from "react-icons/fa";
import { signIn, useSession } from "next-auth/react";

@@ -78,6 +78,7 @@ const NavSidebar = () => {

<IconLink icon={RiHome3Line} label="Dashboard" href="/dashboard" />
<IconLink icon={IoStatsChartOutline} label="Request Logs" href="/request-logs" />
<IconLink icon={AiOutlineDatabase} label="Datasets" href="/datasets" beta />
<IconLink icon={AiOutlineThunderbolt} label="Fine Tunes" href="/fine-tunes" beta />
<IconLink icon={RiFlaskLine} label="Experiments" href="/experiments" />
<VStack w="full" alignItems="flex-start" spacing={0} pt={8}>
@@ -116,8 +117,8 @@ const NavSidebar = () => {
</VStack>
<HStack
w="full"
px={{ base: 2, md: 4 }}
py={{ base: 1, md: 2 }}
px={{ base: 3, md: 4 }}
py={{ base: 0, md: 1 }}
as={ChakraLink}
justifyContent="start"
href="https://docs.openpipe.ai"
@@ -126,8 +127,8 @@ const NavSidebar = () => {
spacing={1}
>
<Icon as={FaReadme} boxSize={4} mr={2} />
<Text fontWeight="bold" fontSize="sm">
Read the Docs
<Text fontWeight="bold" fontSize="sm" display={{ base: "none", md: "flex" }}>
Open Documentation
</Text>
</HStack>
<Divider />

194 app/src/components/requestLogs/AddToDatasetButton.tsx Normal file
@@ -0,0 +1,194 @@
|
||||
import { useState, useEffect, useMemo } from "react";
|
||||
import {
|
||||
Modal,
|
||||
ModalOverlay,
|
||||
ModalContent,
|
||||
ModalHeader,
|
||||
ModalCloseButton,
|
||||
ModalBody,
|
||||
ModalFooter,
|
||||
HStack,
|
||||
VStack,
|
||||
Icon,
|
||||
Text,
|
||||
Button,
|
||||
Flex,
|
||||
Input,
|
||||
useDisclosure,
|
||||
type UseDisclosureReturn,
|
||||
Checkbox,
|
||||
} from "@chakra-ui/react";
|
||||
import { FiPlusSquare } from "react-icons/fi";
|
||||
|
||||
import { useDatasets, useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import { api } from "~/utils/api";
|
||||
import { useAppStore } from "~/state/store";
|
||||
import ActionButton from "../ActionButton";
|
||||
import InputDropdown from "../InputDropdown";
|
||||
import { maybeReportError } from "~/utils/errorHandling/maybeReportError";
|
||||
import { useRouter } from "next/router";
|
||||
|
||||
const AddToDatasetButton = () => {
|
||||
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
|
||||
|
||||
const disclosure = useDisclosure();
|
||||
|
||||
return (
|
||||
<>
|
||||
<ActionButton
|
||||
onClick={disclosure.onOpen}
|
||||
label="Add to Dataset"
|
||||
icon={FiPlusSquare}
|
||||
isDisabled={selectedLogIds.size === 0}
|
||||
requireBeta
|
||||
/>
|
||||
<AddToDatasetModal disclosure={disclosure} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default AddToDatasetButton;
|
||||
|
||||
const AddToDatasetModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
|
||||
const selectedProjectId = useAppStore((s) => s.selectedProjectId);
|
||||
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
|
||||
const clearSelectedLogIds = useAppStore((s) => s.selectedLogs.clearSelectedLogIds);
|
||||
const router = useRouter();
|
||||
|
||||
const datasets = useDatasets().data;
|
||||
|
||||
const existingDatasetOptions = useMemo(
|
||||
() =>
|
||||
datasets?.length
|
||||
? datasets.map((d) => ({ label: d.name, id: d.id }))
|
||||
: [{ label: "", id: "" }],
|
||||
[datasets],
|
||||
);
|
||||
|
||||
const [selectedDatasetOption, setSelectedDatasetOption] = useState(existingDatasetOptions?.[0]);
|
||||
const [newDatasetName, setNewDatasetName] = useState("");
|
||||
const [createNewDataset, setCreateNewDataset] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
if (disclosure.isOpen) {
|
||||
setSelectedDatasetOption(existingDatasetOptions?.[0]);
|
||||
setCreateNewDataset(!existingDatasetOptions[0]?.id);
|
||||
}
|
||||
}, [disclosure.isOpen, existingDatasetOptions]);
|
||||
|
||||
const createDatasetEntriesMutation = api.datasetEntries.create.useMutation();
|
||||
|
||||
const [addToDataset, addingInProgress] = useHandledAsyncCallback(async () => {
|
||||
if (
|
||||
!selectedProjectId ||
|
||||
!selectedLogIds.size ||
|
||||
!(createNewDataset ? newDatasetName : selectedDatasetOption?.id)
|
||||
)
|
||||
return;
|
||||
const datasetParams = createNewDataset
|
||||
? { newDatasetParams: { projectId: selectedProjectId, name: newDatasetName } }
|
||||
: { datasetId: selectedDatasetOption?.id };
|
||||
const response = await createDatasetEntriesMutation.mutateAsync({
|
||||
loggedCallIds: Array.from(selectedLogIds),
|
||||
...datasetParams,
|
||||
});
|
||||
|
||||
if (maybeReportError(response)) return;
|
||||
|
||||
const datasetId = response.payload;
|
||||
|
||||
await router.push({ pathname: "/datasets/[id]", query: { id: datasetId } });
|
||||
|
||||
disclosure.onClose();
|
||||
clearSelectedLogIds();
|
||||
}, [
|
||||
selectedProjectId,
|
||||
selectedLogIds,
|
||||
createNewDataset,
|
||||
selectedDatasetOption?.id,
|
||||
newDatasetName,
|
||||
router,
|
||||
]);
|
||||
|
||||
return (
|
||||
<Modal size={{ base: "xl", md: "2xl" }} {...disclosure}>
|
||||
<ModalOverlay />
|
||||
<ModalContent w={1200}>
|
||||
<ModalHeader>
|
||||
<HStack>
|
||||
<Icon as={FiPlusSquare} />
|
||||
<Text>Add to Dataset</Text>
|
||||
</HStack>
|
||||
</ModalHeader>
|
||||
<ModalCloseButton />
|
||||
<ModalBody maxW="unset">
|
||||
<VStack w="full" spacing={8} pt={4} alignItems="flex-start">
|
||||
<Text>
|
||||
We'll add the <b>{selectedLogIds.size}</b> logs you have selected to the dataset you
|
||||
choose.
|
||||
</Text>
|
||||
<VStack alignItems="flex-start" spacing={4}>
|
||||
{existingDatasetOptions?.length && selectedDatasetOption && (
|
||||
<Flex
|
||||
flexDir={{ base: "column", md: "row" }}
|
||||
alignItems={{ base: "flex-start", md: "center" }}
|
||||
>
|
||||
<Text fontWeight="bold" w={48}>
|
||||
Dataset:
|
||||
</Text>
|
||||
<InputDropdown
|
||||
options={existingDatasetOptions}
|
||||
selectedOption={selectedDatasetOption}
|
||||
getDisplayLabel={(option) => option.label}
|
||||
onSelect={(option) => setSelectedDatasetOption(option)}
|
||||
inputGroupProps={{ w: 48 }}
|
||||
isDisabled={createNewDataset}
|
||||
/>
|
||||
<Checkbox
|
||||
isChecked={createNewDataset}
|
||||
onChange={(e) => setCreateNewDataset(e.target.checked)}
|
||||
paddingLeft={4}
|
||||
isDisabled={!existingDatasetOptions[0]?.id}
|
||||
>
|
||||
<Text>Create New Dataset</Text>
|
||||
</Checkbox>
|
||||
</Flex>
|
||||
)}
|
||||
|
||||
{createNewDataset && (
|
||||
<Flex
|
||||
flexDir={{ base: "column", md: "row" }}
|
||||
alignItems={{ base: "flex-start", md: "center" }}
|
||||
>
|
||||
<Text w={48} fontWeight="bold">
|
||||
Dataset Name:
|
||||
</Text>
|
||||
<Input
|
||||
w={48}
|
||||
value={newDatasetName}
|
||||
onChange={(e) => setNewDatasetName(e.target.value)}
|
||||
/>
|
||||
</Flex>
|
||||
)}
|
||||
</VStack>
|
||||
</VStack>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<HStack>
|
||||
<Button colorScheme="gray" onClick={disclosure.onClose} minW={24}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
colorScheme="blue"
|
||||
onClick={addToDataset}
|
||||
isLoading={addingInProgress}
|
||||
minW={24}
|
||||
>
|
||||
Add
|
||||
</Button>
|
||||
</HStack>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
@@ -17,7 +17,7 @@ import { useMemo } from "react";
import { useIsClientRehydrated, useTagNames } from "~/utils/hooks";
import { useAppStore } from "~/state/store";
import { StaticColumnKeys } from "~/state/columnVisiblitySlice";
import ActionButton from "./ActionButton";
import ActionButton from "../ActionButton";

const ColumnVisiblityDropdown = () => {
const tagNames = useTagNames().data;

@@ -28,7 +28,7 @@ import { BiExport } from "react-icons/bi";
import { useHandledAsyncCallback } from "~/utils/hooks";
import { api } from "~/utils/api";
import { useAppStore } from "~/state/store";
import ActionButton from "./ActionButton";
import ActionButton from "../ActionButton";
import InputDropdown from "../InputDropdown";
import { FiChevronUp, FiChevronDown } from "react-icons/fi";
import InfoCircle from "../InfoCircle";
@@ -81,7 +81,7 @@ const ExportLogsModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) =>
return;
const response = await exportLogsMutation.mutateAsync({
projectId: selectedProjectId,
selectedLogIds: Array.from(selectedLogIds),
loggedCallIds: Array.from(selectedLogIds),
testingSplit,
selectedExportFormat,
removeDuplicates,

@@ -9,17 +9,14 @@ import {
|
||||
Collapse,
|
||||
HStack,
|
||||
VStack,
|
||||
Button,
|
||||
ButtonGroup,
|
||||
Text,
|
||||
Checkbox,
|
||||
Link as ChakraLink,
|
||||
} from "@chakra-ui/react";
|
||||
import Link from "next/link";
|
||||
|
||||
import dayjs from "~/utils/dayjs";
|
||||
import { type RouterOutputs } from "~/utils/api";
|
||||
import { FormattedJson } from "./FormattedJson";
|
||||
import { FormattedJson } from "../FormattedJson";
|
||||
import { useAppStore } from "~/state/store";
|
||||
import { useIsClientRehydrated, useLoggedCalls, useTagNames } from "~/utils/hooks";
|
||||
import { useMemo } from "react";
|
||||
@@ -176,23 +173,16 @@ export const TableRow = ({
|
||||
<Tr>
|
||||
<Td colSpan={visibleColumns.size + 1} w="full" p={0}>
|
||||
<Collapse in={isExpanded} unmountOnExit={true}>
|
||||
<VStack p={4} align="stretch">
|
||||
<HStack align="stretch">
|
||||
<VStack flex={1} align="stretch">
|
||||
<Heading size="sm">Input</Heading>
|
||||
<FormattedJson json={loggedCall.modelResponse?.reqPayload} />
|
||||
</VStack>
|
||||
<VStack flex={1} align="stretch">
|
||||
<Heading size="sm">Output</Heading>
|
||||
<FormattedJson json={loggedCall.modelResponse?.respPayload} />
|
||||
</VStack>
|
||||
</HStack>
|
||||
<ButtonGroup alignSelf="flex-end">
|
||||
<Button as={Link} colorScheme="blue" href={{ pathname: "/experiments" }}>
|
||||
Experiments
|
||||
</Button>
|
||||
</ButtonGroup>
|
||||
</VStack>
|
||||
<HStack align="stretch" p={4}>
|
||||
<VStack flex={1} align="stretch">
|
||||
<Heading size="sm">Input</Heading>
|
||||
<FormattedJson json={loggedCall.modelResponse?.reqPayload} />
|
||||
</VStack>
|
||||
<VStack flex={1} align="stretch">
|
||||
<Heading size="sm">Output</Heading>
|
||||
<FormattedJson json={loggedCall.modelResponse?.respPayload} />
|
||||
</VStack>
|
||||
</HStack>
|
||||
</Collapse>
|
||||
</Td>
|
||||
</Tr>
|
||||
|
||||
@@ -26,6 +26,9 @@ export const env = createEnv({
SMTP_PORT: z.string().default("placeholder"),
SMTP_LOGIN: z.string().default("placeholder"),
SMTP_PASSWORD: z.string().default("placeholder"),
AZURE_STORAGE_ACCOUNT_NAME: z.string().default("placeholder"),
AZURE_STORAGE_ACCOUNT_KEY: z.string().default("placeholder"),
AZURE_STORAGE_CONTAINER_NAME: z.string().default("placeholder"),
WORKER_CONCURRENCY: z
.string()
.default("10")
@@ -72,6 +75,9 @@ export const env = createEnv({
SMTP_PORT: process.env.SMTP_PORT,
SMTP_LOGIN: process.env.SMTP_LOGIN,
SMTP_PASSWORD: process.env.SMTP_PASSWORD,
AZURE_STORAGE_ACCOUNT_NAME: process.env.AZURE_STORAGE_ACCOUNT_NAME,
AZURE_STORAGE_ACCOUNT_KEY: process.env.AZURE_STORAGE_ACCOUNT_KEY,
AZURE_STORAGE_CONTAINER_NAME: process.env.AZURE_STORAGE_CONTAINER_NAME,
WORKER_CONCURRENCY: process.env.WORKER_CONCURRENCY,
WORKER_MAX_POOL_SIZE: process.env.WORKER_MAX_POOL_SIZE,
},

@@ -14,7 +14,7 @@ export async function getCompletion(
let finalCompletion: ChatCompletion | null = null;

try {
if (onStream) {
if (onStream && !input.function_call) {
const resp = await openai.chat.completions.create(
{
...input,

@@ -42,24 +42,21 @@ const modelProvider: OpenaiChatModelProvider = {
canStream: true,
getCompletion,
getUsage: (input, output) => {
if (output.choices.length === 0) return null;

const model = modelProvider.getModel(input);
if (!model) return null;

let inputTokens: number;
let outputTokens: number;

if (output.usage) {
if (output?.usage) {
inputTokens = output.usage.prompt_tokens;
outputTokens = output.usage.completion_tokens;
} else {
try {
inputTokens = countOpenAIChatTokens(model, input.messages);
outputTokens = countOpenAIChatTokens(
model,
output.choices.map((c) => c.message).filter(truthyFilter),
);
outputTokens = output
? countOpenAIChatTokens(model, output.choices.map((c) => c.message).filter(truthyFilter))
: 0;
} catch (err) {
inputTokens = 0;
outputTokens = 0;

@@ -59,7 +59,7 @@ export type ModelProvider<SupportedModels extends string, InputSchema, OutputSch
) => Promise<CompletionResponse<OutputSchema>>;
getUsage: (
input: InputSchema,
output: OutputSchema,
output?: OutputSchema,
) => { gpuRuntime?: number; inputTokens?: number; outputTokens?: number; cost?: number } | null;

// This is just a convenience for type inference, don't use it at runtime

121 app/src/pages/datasets/[id].tsx Normal file
@@ -0,0 +1,121 @@
|
||||
import {
|
||||
Breadcrumb,
|
||||
BreadcrumbItem,
|
||||
Center,
|
||||
Flex,
|
||||
Icon,
|
||||
Input,
|
||||
VStack,
|
||||
HStack,
|
||||
useDisclosure,
|
||||
} from "@chakra-ui/react";
|
||||
import Link from "next/link";
|
||||
import { useState, useEffect } from "react";
|
||||
import { AiOutlineDatabase } from "react-icons/ai";
|
||||
|
||||
import AppShell from "~/components/nav/AppShell";
|
||||
import { api } from "~/utils/api";
|
||||
import { useDataset, useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import PageHeaderContainer from "~/components/nav/PageHeaderContainer";
|
||||
import ProjectBreadcrumbContents from "~/components/nav/ProjectBreadcrumbContents";
|
||||
import DatasetConfigurationDrawer from "~/components/datasets/DatasetConfigurationDrawer/DatasetConfigurationDrawer";
|
||||
import { DatasetHeaderButtons } from "~/components/datasets/DatasetHeaderButtons";
|
||||
import DatasetEntriesTable from "~/components/datasets/DatasetEntriesTable/DatasetEntriesTable";
|
||||
import DatasetEntryPaginator from "~/components/datasets/DatasetEntryPaginator";
|
||||
import { useAppStore } from "~/state/store";
|
||||
import FineTuneButton from "~/components/datasets/FineTuneButton";
|
||||
import ExperimentButton from "~/components/datasets/ExperimentButton";
|
||||
import UploadDataButton from "~/components/datasets/UploadDataButton";
|
||||
// import DownloadButton from "~/components/datasets/DownloadButton";
|
||||
import DeleteButton from "~/components/datasets/DeleteButton";
|
||||
import FileUploadsCard from "~/components/datasets/FileUploadsCard";
|
||||
|
||||
export default function Dataset() {
|
||||
const utils = api.useContext();
|
||||
|
||||
const dataset = useDataset();
|
||||
|
||||
const drawerDisclosure = useDisclosure();
|
||||
const [name, setName] = useState(dataset.data?.name || "");
|
||||
useEffect(() => {
|
||||
setName(dataset.data?.name || "");
|
||||
}, [dataset.data?.name]);
|
||||
|
||||
useEffect(() => {
|
||||
useAppStore.getState().sharedArgumentsEditor.loadMonaco().catch(console.error);
|
||||
}, []);
|
||||
|
||||
const updateMutation = api.datasets.update.useMutation();
|
||||
const [onSaveName] = useHandledAsyncCallback(async () => {
|
||||
if (name && name !== dataset.data?.name && dataset.data?.id) {
|
||||
await updateMutation.mutateAsync({
|
||||
id: dataset.data.id,
|
||||
name,
|
||||
});
|
||||
await Promise.all([utils.datasets.list.invalidate(), utils.datasets.get.invalidate()]);
|
||||
}
|
||||
}, [updateMutation, dataset.data?.id, dataset.data?.name, name]);
|
||||
|
||||
if (!dataset.isLoading && !dataset.data) {
|
||||
return (
|
||||
<AppShell title="Dataset not found">
|
||||
<Center h="100%">
|
||||
<div>Dataset not found 😕</div>
|
||||
</Center>
|
||||
</AppShell>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<AppShell title={dataset.data?.name}>
|
||||
<VStack h="full" overflowY="scroll">
|
||||
<PageHeaderContainer>
|
||||
<Breadcrumb>
|
||||
<BreadcrumbItem>
|
||||
<ProjectBreadcrumbContents projectName={dataset.data?.project?.name} />
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbItem>
|
||||
<Link href="/datasets">
|
||||
<Flex alignItems="center" _hover={{ textDecoration: "underline" }}>
|
||||
<Icon as={AiOutlineDatabase} boxSize={4} mr={2} /> Datasets
|
||||
</Flex>
|
||||
</Link>
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbItem isCurrentPage>
|
||||
<Input
|
||||
size="sm"
|
||||
value={name}
|
||||
onChange={(e) => setName(e.target.value)}
|
||||
onBlur={onSaveName}
|
||||
borderWidth={1}
|
||||
borderColor="transparent"
|
||||
fontSize={16}
|
||||
px={0}
|
||||
minW={{ base: 100, lg: 300 }}
|
||||
flex={1}
|
||||
_hover={{ borderColor: "gray.300" }}
|
||||
_focus={{ borderColor: "blue.500", outline: "none" }}
|
||||
/>
|
||||
</BreadcrumbItem>
|
||||
</Breadcrumb>
|
||||
<DatasetHeaderButtons openDrawer={drawerDisclosure.onOpen} />
|
||||
</PageHeaderContainer>
|
||||
<VStack px={8} py={8} alignItems="flex-start" spacing={4} w="full">
|
||||
<HStack w="full" justifyContent="flex-end">
|
||||
<FineTuneButton />
|
||||
<UploadDataButton />
|
||||
<ExperimentButton />
|
||||
{/* <DownloadButton /> */}
|
||||
<DeleteButton />
|
||||
</HStack>
|
||||
<DatasetEntriesTable />
|
||||
<DatasetEntryPaginator />
|
||||
</VStack>
|
||||
</VStack>
|
||||
<FileUploadsCard />
|
||||
</AppShell>
|
||||
<DatasetConfigurationDrawer disclosure={drawerDisclosure} />
|
||||
</>
|
||||
);
|
||||
}
|
||||
17 app/src/pages/datasets/index.tsx Normal file
@@ -0,0 +1,17 @@
import { VStack, Text, Divider } from "@chakra-ui/react";
import AppShell from "~/components/nav/AppShell";
import DatasetsTable from "~/components/datasets/DatasetsTable";

export default function DatasetsPage() {
return (
<AppShell title="Datasets" requireAuth>
<VStack w="full" py={8} px={8} spacing={4} alignItems="flex-start">
<Text fontSize="2xl" fontWeight="bold">
Datasets
</Text>
<Divider />
<DatasetsTable />
</VStack>
</AppShell>
);
}
@@ -8,26 +8,25 @@ import {
|
||||
Input,
|
||||
Text,
|
||||
VStack,
|
||||
useDisclosure,
|
||||
} from "@chakra-ui/react";
|
||||
import Link from "next/link";
|
||||
|
||||
import { useRouter } from "next/router";
|
||||
import { useState, useEffect } from "react";
|
||||
import { RiFlaskLine } from "react-icons/ri";
|
||||
import OutputsTable from "~/components/OutputsTable";
|
||||
import ExperimentSettingsDrawer from "~/components/ExperimentSettingsDrawer/ExperimentSettingsDrawer";
|
||||
import ExperimentSettingsDrawer from "~/components/experiments/ExperimentSettingsDrawer/ExperimentSettingsDrawer";
|
||||
import { ExperimentHeaderButtons } from "~/components/experiments/ExperimentHeaderButtons/ExperimentHeaderButtons";
|
||||
import AppShell from "~/components/nav/AppShell";
|
||||
import { api } from "~/utils/api";
|
||||
import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
|
||||
import { useAppStore } from "~/state/store";
|
||||
import { useSyncVariantEditor } from "~/state/sync";
|
||||
import { ExperimentHeaderButtons } from "~/components/experiments/ExperimentHeaderButtons/ExperimentHeaderButtons";
|
||||
import Head from "next/head";
|
||||
import PageHeaderContainer from "~/components/nav/PageHeaderContainer";
|
||||
import ProjectBreadcrumbContents from "~/components/nav/ProjectBreadcrumbContents";
|
||||
|
||||
export default function Experiment() {
|
||||
const router = useRouter();
|
||||
const utils = api.useContext();
|
||||
useSyncVariantEditor();
|
||||
|
||||
@@ -44,6 +43,7 @@ export default function Experiment() {
|
||||
useAppStore.getState().sharedVariantEditor.loadMonaco().catch(console.error);
|
||||
}, []);
|
||||
|
||||
const drawerDisclosure = useDisclosure();
|
||||
const [label, setLabel] = useState(experiment.data?.label || "");
|
||||
useEffect(() => {
|
||||
setLabel(experiment.data?.label || "");
|
||||
@@ -121,11 +121,11 @@ export default function Experiment() {
|
||||
)}
|
||||
</BreadcrumbItem>
|
||||
</Breadcrumb>
|
||||
<ExperimentHeaderButtons />
|
||||
<ExperimentHeaderButtons openDrawer={drawerDisclosure.onOpen} />
|
||||
</PageHeaderContainer>
|
||||
<ExperimentSettingsDrawer />
|
||||
<ExperimentSettingsDrawer disclosure={drawerDisclosure} />
|
||||
<Box w="100%" overflowX="auto" flex={1} id="output-container">
|
||||
<OutputsTable experimentId={experiment.data?.id} />
|
||||
<OutputsTable experimentId={experiment.data?.id} openDrawer={drawerDisclosure.onOpen} />
|
||||
</Box>
|
||||
</VStack>
|
||||
</AppShell>
|
||||
|
||||
@@ -4,14 +4,13 @@ import { Text, VStack, Divider, HStack, Box } from "@chakra-ui/react";
|
||||
import AppShell from "~/components/nav/AppShell";
|
||||
import LoggedCallTable from "~/components/requestLogs/LoggedCallsTable";
|
||||
import LoggedCallsPaginator from "~/components/requestLogs/LoggedCallsPaginator";
|
||||
import ActionButton from "~/components/requestLogs/ActionButton";
|
||||
import ActionButton from "~/components/ActionButton";
|
||||
import { useAppStore } from "~/state/store";
|
||||
import { RiFlaskLine } from "react-icons/ri";
|
||||
import { FiFilter } from "react-icons/fi";
|
||||
import LogFilters from "~/components/requestLogs/LogFilters/LogFilters";
|
||||
import ColumnVisiblityDropdown from "~/components/requestLogs/ColumnVisiblityDropdown";
|
||||
import FineTuneButton from "~/components/requestLogs/FineTuneButton";
|
||||
import ExportButton from "~/components/requestLogs/ExportButton";
|
||||
import AddToDatasetButton from "~/components/requestLogs/AddToDatasetButton";
|
||||
|
||||
export default function LoggedCalls() {
|
||||
const selectedLogIds = useAppStore((s) => s.selectedLogs.selectedLogIds);
|
||||
@@ -27,16 +26,7 @@ export default function LoggedCalls() {
|
||||
</Text>
|
||||
<Divider />
|
||||
<HStack w="full" justifyContent="flex-end">
|
||||
<FineTuneButton />
|
||||
<ActionButton
|
||||
onClick={() => {
|
||||
console.log("experimenting with these ids", selectedLogIds);
|
||||
}}
|
||||
label="Experiment"
|
||||
icon={RiFlaskLine}
|
||||
isDisabled={selectedLogIds.size === 0}
|
||||
requireBeta
|
||||
/>
|
||||
<AddToDatasetButton />
|
||||
<ExportButton />
|
||||
<ColumnVisiblityDropdown />
|
||||
<ActionButton
|
||||
|
||||
4 app/src/server/api/external/v1Api.router.ts vendored
@@ -119,10 +119,10 @@ export const v1ApiRouter = createOpenApiRouter({

let usage;
let model;
if (reqPayload.success && respPayload.success) {
if (reqPayload.success) {
usage = modelProvider.getUsage(
input.reqPayload as CompletionCreateParams,
input.respPayload as ChatCompletion,
respPayload.success ? (input.respPayload as ChatCompletion) : undefined,
);
model = reqPayload.data.model;
}

@@ -9,6 +9,8 @@ import { worldChampsRouter } from "./routers/worldChamps.router";
import { projectsRouter } from "./routers/projects.router";
import { dashboardRouter } from "./routers/dashboard.router";
import { loggedCallsRouter } from "./routers/loggedCalls.router";
import { datasetsRouter } from "./routers/datasets.router";
import { datasetEntriesRouter } from "./routers/datasetEntries.router";
import { fineTunesRouter } from "./routers/fineTunes.router";
import { usersRouter } from "./routers/users.router";
import { adminJobsRouter } from "./routers/adminJobs.router";
@@ -29,6 +31,8 @@ export const appRouter = createTRPCRouter({
projects: projectsRouter,
dashboard: dashboardRouter,
loggedCalls: loggedCallsRouter,
datasets: datasetsRouter,
datasetEntries: datasetEntriesRouter,
fineTunes: fineTunesRouter,
users: usersRouter,
adminJobs: adminJobsRouter,

337 app/src/server/api/routers/datasetEntries.router.ts Normal file
@@ -0,0 +1,337 @@
|
||||
import { z } from "zod";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import {
|
||||
type ChatCompletion,
|
||||
type CompletionCreateParams,
|
||||
type CreateChatCompletionRequestMessage,
|
||||
} from "openai/resources/chat";
|
||||
import { TRPCError } from "@trpc/server";
|
||||
import archiver from "archiver";
|
||||
import { WritableStreamBuffer } from "stream-buffers";
|
||||
|
||||
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
|
||||
import { prisma } from "~/server/db";
|
||||
import { requireCanModifyProject, requireCanViewProject } from "~/utils/accessControl";
|
||||
import { error, success } from "~/utils/errorHandling/standardResponses";
|
||||
import { countOpenAIChatTokens } from "~/utils/countTokens";
|
||||
import { type TrainingRow } from "~/components/datasets/validateTrainingRows";
|
||||
import hashObject from "~/server/utils/hashObject";
|
||||
import { type JsonValue } from "type-fest";
|
||||
import { formatEntriesFromTrainingRows } from "~/server/utils/createEntriesFromTrainingRows";
|
||||
|
||||
export const datasetEntriesRouter = createTRPCRouter({
|
||||
list: protectedProcedure
|
||||
.input(z.object({ datasetId: z.string(), page: z.number(), pageSize: z.number() }))
|
||||
.query(async ({ input, ctx }) => {
|
||||
const { datasetId, page, pageSize } = input;
|
||||
|
||||
const { projectId } = await prisma.dataset.findUniqueOrThrow({
|
||||
where: { id: datasetId },
|
||||
});
|
||||
await requireCanViewProject(projectId, ctx);
|
||||
|
||||
const [entries, matchingEntries, trainingCount, testingCount] = await prisma.$transaction([
|
||||
prisma.datasetEntry.findMany({
|
||||
where: {
|
||||
datasetId: datasetId,
|
||||
},
|
||||
orderBy: [{ createdAt: "desc" }, { id: "desc" }],
|
||||
skip: (page - 1) * pageSize,
|
||||
take: pageSize,
|
||||
}),
|
||||
prisma.datasetEntry.findMany({
|
||||
where: {
|
||||
datasetId: datasetId,
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
},
|
||||
}),
|
||||
prisma.datasetEntry.count({
|
||||
where: {
|
||||
datasetId: datasetId,
|
||||
type: "TRAIN",
|
||||
},
|
||||
}),
|
||||
prisma.datasetEntry.count({
|
||||
where: {
|
||||
datasetId: datasetId,
|
||||
type: "TEST",
|
||||
},
|
||||
}),
|
||||
]);
|
||||
|
||||
return {
|
||||
entries,
|
||||
matchingEntryIds: matchingEntries.map((entry) => entry.id),
|
||||
trainingCount,
|
||||
testingCount,
|
||||
};
|
||||
}),
|
||||
get: protectedProcedure.input(z.object({ id: z.string() })).query(async ({ input, ctx }) => {
|
||||
const entry = await prisma.datasetEntry.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
include: {
|
||||
dataset: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (!entry.dataset) {
|
||||
throw new TRPCError({ message: "Dataset not found for dataset entry", code: "NOT_FOUND" });
|
||||
}
|
||||
|
||||
await requireCanViewProject(entry.dataset.projectId, ctx);
|
||||
|
||||
if (!entry) {
|
||||
throw new TRPCError({ message: "Dataset entry not found", code: "NOT_FOUND" });
|
||||
}
|
||||
|
||||
return entry;
|
||||
}),
|
||||
create: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
datasetId: z.string().optional(),
|
||||
newDatasetParams: z
|
||||
.object({
|
||||
projectId: z.string(),
|
||||
name: z.string(),
|
||||
})
|
||||
.optional(),
|
||||
loggedCallIds: z.string().array().optional(),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
let datasetId: string;
|
||||
let trainingRatio = 0.8;
|
||||
if (input.datasetId) {
|
||||
datasetId = input.datasetId;
|
||||
const { projectId, trainingRatio: datasetTrainingRatio } =
|
||||
await prisma.dataset.findUniqueOrThrow({
|
||||
where: { id: input.datasetId },
|
||||
});
|
||||
trainingRatio = datasetTrainingRatio;
|
||||
await requireCanModifyProject(projectId, ctx);
|
||||
} else if (input.newDatasetParams) {
|
||||
await requireCanModifyProject(input.newDatasetParams.projectId, ctx);
|
||||
datasetId = uuidv4();
|
||||
} else {
|
||||
return error("No datasetId or newDatasetParams provided");
|
||||
}
|
||||
|
||||
if (!input.loggedCallIds) {
|
||||
return error("No loggedCallIds provided");
|
||||
}
|
||||
|
||||
const loggedCalls = await prisma.loggedCall.findMany({
|
||||
where: {
|
||||
id: {
|
||||
in: input.loggedCallIds,
|
||||
},
|
||||
modelResponse: {
|
||||
isNot: null,
|
||||
},
|
||||
},
|
||||
include: {
|
||||
modelResponse: {
|
||||
select: {
|
||||
reqPayload: true,
|
||||
respPayload: true,
|
||||
inputTokens: true,
|
||||
outputTokens: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
orderBy: { createdAt: "desc" },
|
||||
});
|
||||
|
||||
const trainingRows = loggedCalls.map((loggedCall) => {
|
||||
const inputMessages = (
|
||||
loggedCall.modelResponse?.reqPayload as unknown as CompletionCreateParams
|
||||
).messages;
|
||||
let output: ChatCompletion.Choice.Message | undefined = undefined;
|
||||
const resp = loggedCall.modelResponse?.respPayload as unknown as ChatCompletion | undefined;
|
||||
if (resp && resp.choices?.[0]) {
|
||||
output = resp.choices[0].message;
|
||||
}
|
||||
return {
|
||||
input: inputMessages as unknown as CreateChatCompletionRequestMessage[],
|
||||
output: output as unknown as CreateChatCompletionRequestMessage,
|
||||
};
|
||||
});
|
||||
|
||||
const datasetEntriesToCreate = await formatEntriesFromTrainingRows(datasetId, trainingRows);
|
||||
|
||||
// Ensure dataset and dataset entries are created atomically
|
||||
await prisma.$transaction([
|
||||
prisma.dataset.upsert({
|
||||
where: { id: datasetId },
|
||||
update: {},
|
||||
create: {
|
||||
id: datasetId,
|
||||
projectId: input.newDatasetParams?.projectId ?? "",
|
||||
name: input.newDatasetParams?.name ?? "",
|
||||
trainingRatio,
|
||||
},
|
||||
}),
|
||||
prisma.datasetEntry.createMany({
|
||||
data: datasetEntriesToCreate,
|
||||
}),
|
||||
]);
|
||||
|
||||
return success(datasetId);
|
||||
}),
|
||||
update: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
updates: z.object({
|
||||
type: z.enum(["TRAIN", "TEST"]).optional(),
|
||||
input: z.string().optional(),
|
||||
output: z.string().optional(),
|
||||
}),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
const { dataset } = await prisma.datasetEntry.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
include: {
|
||||
dataset: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (!dataset) {
|
||||
return error("Dataset not found for dataset entry");
|
||||
}
|
||||
|
||||
await requireCanModifyProject(dataset.projectId, ctx);
|
||||
|
||||
let parsedInput = undefined;
|
||||
let inputTokens = undefined;
|
||||
if (input.updates.input) {
|
||||
parsedInput = JSON.parse(input.updates.input);
|
||||
inputTokens = countOpenAIChatTokens(
|
||||
"gpt-4-0613",
|
||||
parsedInput as unknown as CreateChatCompletionRequestMessage[],
|
||||
);
|
||||
}
|
||||
|
||||
let parsedOutput = undefined;
|
||||
let outputTokens = undefined;
|
||||
// The client might send "null" as a string, so we need to check for that
|
||||
if (input.updates.output && input.updates.output !== "null") {
|
||||
parsedOutput = JSON.parse(input.updates.output);
|
||||
outputTokens = countOpenAIChatTokens("gpt-4-0613", [
|
||||
parsedOutput as unknown as ChatCompletion.Choice.Message,
|
||||
]);
|
||||
}
|
||||
|
||||
await prisma.datasetEntry.update({
|
||||
where: { id: input.id },
|
||||
data: {
|
||||
type: input.updates.type,
|
||||
input: parsedInput,
|
||||
output: parsedOutput,
|
||||
inputTokens,
|
||||
outputTokens,
|
||||
},
|
||||
});
|
||||
|
||||
return success("Dataset entry updated");
|
||||
}),
|
||||
|
||||
delete: protectedProcedure
|
||||
.input(z.object({ ids: z.string().array() }))
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
if (input.ids.length === 0) {
|
||||
return error("No ids provided");
|
||||
}
|
||||
const { dataset } = await prisma.datasetEntry.findUniqueOrThrow({
|
||||
where: { id: input.ids[0] },
|
||||
include: {
|
||||
dataset: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (!dataset) {
|
||||
return error("Dataset not found for dataset entry");
|
||||
}
|
||||
|
||||
await requireCanModifyProject(dataset.projectId, ctx);
|
||||
|
||||
await prisma.datasetEntry.deleteMany({
|
||||
where: {
|
||||
id: {
|
||||
in: input.ids,
|
||||
},
|
||||
datasetId: dataset?.id,
|
||||
},
|
||||
});
|
||||
|
||||
return success("Dataset entries deleted");
|
||||
}),
|
||||
|
||||
export: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
datasetId: z.string(),
|
||||
datasetEntryIds: z.string().array(),
|
||||
testingSplit: z.number(),
|
||||
removeDuplicates: z.boolean(),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
const { projectId } = await prisma.dataset.findUniqueOrThrow({
|
||||
where: { id: input.datasetId },
|
||||
});
|
||||
await requireCanViewProject(projectId, ctx);
|
||||
|
||||
const datasetEntries = await ctx.prisma.datasetEntry.findMany({
|
||||
where: {
|
||||
id: {
|
||||
in: input.datasetEntryIds,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
let rows: TrainingRow[] = datasetEntries.map((entry) => ({
|
||||
input: entry.input as unknown as CreateChatCompletionRequestMessage[],
|
||||
output: entry.output as unknown as CreateChatCompletionRequestMessage,
|
||||
}));
|
||||
|
||||
if (input.removeDuplicates) {
|
||||
const deduplicatedRows = [];
|
||||
const rowHashSet = new Set<string>();
|
||||
for (const row of rows) {
|
||||
const rowHash = hashObject(row as unknown as JsonValue);
|
||||
if (!rowHashSet.has(rowHash)) {
|
||||
rowHashSet.add(rowHash);
|
||||
deduplicatedRows.push(row);
|
||||
}
|
||||
}
|
||||
rows = deduplicatedRows;
|
||||
}
|
||||
|
||||
const splitIndex = Math.floor((rows.length * input.testingSplit) / 100);
|
||||
|
||||
const testingData = rows.slice(0, splitIndex);
|
||||
const trainingData = rows.slice(splitIndex);
|
||||
|
||||
// Convert arrays to JSONL format
|
||||
const trainingDataJSONL = trainingData.map((item) => JSON.stringify(item)).join("\n");
|
||||
const testingDataJSONL = testingData.map((item) => JSON.stringify(item)).join("\n");
|
||||
|
||||
const output = new WritableStreamBuffer();
|
||||
const archive = archiver("zip");
|
||||
|
||||
archive.pipe(output);
|
||||
archive.append(trainingDataJSONL, { name: "train.jsonl" });
|
||||
archive.append(testingDataJSONL, { name: "test.jsonl" });
|
||||
await archive.finalize();
|
||||
|
||||
// Convert buffer to base64
|
||||
const base64 = output.getContents().toString("base64");
|
||||
|
||||
return base64;
|
||||
}),
|
||||
});
|
||||
183
app/src/server/api/routers/datasets.router.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
|
||||
import { prisma } from "~/server/db";
|
||||
import { requireCanModifyProject, requireCanViewProject } from "~/utils/accessControl";
|
||||
import { error, success } from "~/utils/errorHandling/standardResponses";
|
||||
import { generateServiceClientUrl } from "~/utils/azure/server";
|
||||
import { queueImportDatasetEntries } from "~/server/tasks/importDatasetEntries.task";
|
||||
|
||||
export const datasetsRouter = createTRPCRouter({
|
||||
get: protectedProcedure.input(z.object({ id: z.string() })).query(async ({ input, ctx }) => {
|
||||
const dataset = await prisma.dataset.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
include: {
|
||||
project: true,
|
||||
},
|
||||
});
|
||||
|
||||
await requireCanViewProject(dataset.projectId, ctx);
|
||||
|
||||
return dataset;
|
||||
}),
|
||||
list: protectedProcedure
|
||||
.input(z.object({ projectId: z.string() }))
|
||||
.query(async ({ input, ctx }) => {
|
||||
await requireCanViewProject(input.projectId, ctx);
|
||||
|
||||
return await prisma.dataset.findMany({
|
||||
where: {
|
||||
projectId: input.projectId,
|
||||
},
|
||||
include: {
|
||||
_count: {
|
||||
select: {
|
||||
datasetEntries: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
orderBy: { createdAt: "desc" },
|
||||
});
|
||||
}),
|
||||
|
||||
create: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
projectId: z.string(),
|
||||
name: z.string(),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
await requireCanModifyProject(input.projectId, ctx);
|
||||
|
||||
const dataset = await prisma.dataset.create({
|
||||
data: {
|
||||
projectId: input.projectId,
|
||||
name: input.name,
|
||||
},
|
||||
});
|
||||
|
||||
return success(dataset);
|
||||
}),
|
||||
|
||||
update: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
const { projectId } = await prisma.dataset.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
});
|
||||
await requireCanModifyProject(projectId, ctx);
|
||||
|
||||
await prisma.dataset.update({
|
||||
where: { id: input.id },
|
||||
data: {
|
||||
name: input.name,
|
||||
},
|
||||
});
|
||||
|
||||
return success("Dataset updated");
|
||||
}),
|
||||
|
||||
delete: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
const { projectId } = await prisma.dataset.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
});
|
||||
await requireCanModifyProject(projectId, ctx);
|
||||
|
||||
await prisma.dataset.delete({
|
||||
where: { id: input.id },
|
||||
});
|
||||
|
||||
return success("Dataset deleted");
|
||||
}),
|
||||
getServiceClientUrl: protectedProcedure
|
||||
.input(z.object({ projectId: z.string() }))
|
||||
.query(async ({ input, ctx }) => {
|
||||
// The user must at least be authenticated to get a SAS token
|
||||
await requireCanModifyProject(input.projectId, ctx);
|
||||
return generateServiceClientUrl();
|
||||
}),
|
||||
triggerFileDownload: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
datasetId: z.string(),
|
||||
blobName: z.string(),
|
||||
fileName: z.string(),
|
||||
fileSize: z.number(),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
const { projectId } = await prisma.dataset.findUniqueOrThrow({
|
||||
where: { id: input.datasetId },
|
||||
});
|
||||
await requireCanViewProject(projectId, ctx);
|
||||
|
||||
const { id } = await prisma.datasetFileUpload.create({
|
||||
data: {
|
||||
datasetId: input.datasetId,
|
||||
blobName: input.blobName,
|
||||
status: "PENDING",
|
||||
fileName: input.fileName,
|
||||
fileSize: input.fileSize,
|
||||
uploadedAt: new Date(),
|
||||
},
|
||||
});
|
||||
|
||||
await queueImportDatasetEntries(id);
|
||||
}),
|
||||
listFileUploads: protectedProcedure
|
||||
.input(z.object({ datasetId: z.string() }))
|
||||
.query(async ({ input, ctx }) => {
|
||||
const { projectId } = await prisma.dataset.findUniqueOrThrow({
|
||||
where: { id: input.datasetId },
|
||||
});
|
||||
await requireCanViewProject(projectId, ctx);
|
||||
|
||||
return await prisma.datasetFileUpload.findMany({
|
||||
where: {
|
||||
datasetId: input.datasetId,
|
||||
visible: true,
|
||||
},
|
||||
orderBy: { createdAt: "desc" },
|
||||
});
|
||||
}),
|
||||
hideFileUploads: protectedProcedure
|
||||
.input(z.object({ fileUploadIds: z.string().array() }))
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
if (!input.fileUploadIds.length) return error("No file upload ids provided");
|
||||
|
||||
const {
|
||||
dataset: { projectId, id: datasetId },
|
||||
} = await prisma.datasetFileUpload.findUniqueOrThrow({
|
||||
where: { id: input.fileUploadIds[0] },
|
||||
select: {
|
||||
dataset: {
|
||||
select: {
|
||||
id: true,
|
||||
projectId: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
await requireCanModifyProject(projectId, ctx);
|
||||
|
||||
await prisma.datasetFileUpload.updateMany({
|
||||
where: {
|
||||
id: {
|
||||
in: input.fileUploadIds,
|
||||
},
|
||||
datasetId,
|
||||
},
|
||||
data: {
|
||||
visible: false,
|
||||
},
|
||||
});
|
||||
}),
|
||||
});
|
||||
@@ -1,6 +1,4 @@
|
||||
import { z } from "zod";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { type Prisma } from "@prisma/client";
|
||||
|
||||
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
|
||||
import { prisma } from "~/server/db";
|
||||
@@ -55,14 +53,18 @@ export const fineTunesRouter = createTRPCRouter({
|
||||
create: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
projectId: z.string(),
|
||||
selectedLogIds: z.array(z.string()),
|
||||
datasetId: z.string(),
|
||||
slug: z.string(),
|
||||
baseModel: z.string(),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
await requireCanModifyProject(input.projectId, ctx);
|
||||
const { projectId } = await prisma.dataset.findUniqueOrThrow({
|
||||
where: {
|
||||
id: input.datasetId,
|
||||
},
|
||||
});
|
||||
await requireCanModifyProject(projectId, ctx);
|
||||
|
||||
const existingFineTune = await prisma.fineTune.findFirst({
|
||||
where: {
|
||||
@@ -74,39 +76,14 @@ export const fineTunesRouter = createTRPCRouter({
|
||||
return error("A fine tune with that slug already exists");
|
||||
}
|
||||
|
||||
const newDatasetId = uuidv4();
|
||||
|
||||
const datasetEntriesToCreate: Prisma.DatasetEntryCreateManyDatasetInput[] =
|
||||
input.selectedLogIds.map((loggedCallId) => ({
|
||||
loggedCallId,
|
||||
}));
|
||||
|
||||
await prisma.$transaction([
|
||||
prisma.dataset.create({
|
||||
data: {
|
||||
id: newDatasetId,
|
||||
name: input.slug,
|
||||
project: {
|
||||
connect: {
|
||||
id: input.projectId,
|
||||
},
|
||||
},
|
||||
datasetEntries: {
|
||||
createMany: {
|
||||
data: datasetEntriesToCreate,
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
prisma.fineTune.create({
|
||||
data: {
|
||||
projectId: input.projectId,
|
||||
slug: input.slug,
|
||||
baseModel: input.baseModel,
|
||||
datasetId: newDatasetId,
|
||||
},
|
||||
}),
|
||||
]);
|
||||
await prisma.fineTune.create({
|
||||
data: {
|
||||
projectId,
|
||||
slug: input.slug,
|
||||
baseModel: input.baseModel,
|
||||
datasetId: input.datasetId,
|
||||
},
|
||||
});
|
||||
|
||||
return success();
|
||||
}),
|
||||
|
||||
@@ -189,7 +189,7 @@ export const loggedCallsRouter = createTRPCRouter({
|
||||
.input(
|
||||
z.object({
|
||||
projectId: z.string(),
|
||||
selectedLogIds: z.string().array(),
|
||||
loggedCallIds: z.string().array(),
|
||||
testingSplit: z.number(),
|
||||
selectedExportFormat: z.string(),
|
||||
removeDuplicates: z.boolean(),
|
||||
@@ -203,7 +203,7 @@ export const loggedCallsRouter = createTRPCRouter({
|
||||
where: {
|
||||
originalLoggedCall: {
|
||||
projectId: input.projectId,
|
||||
id: { in: input.selectedLogIds },
|
||||
id: { in: input.loggedCallIds },
|
||||
},
|
||||
statusCode: 200,
|
||||
},
|
||||
|
||||
@@ -93,17 +93,12 @@ export const promptVariantsRouter = createTRPCRouter({
|
||||
visible: true,
|
||||
},
|
||||
});
|
||||
const outputCount = await prisma.scenarioVariantCell.count({
|
||||
const finishedCount = await prisma.scenarioVariantCell.count({
|
||||
where: {
|
||||
promptVariantId: input.variantId,
|
||||
testScenario: { visible: true },
|
||||
modelResponses: {
|
||||
some: {
|
||||
outdated: false,
|
||||
respPayload: {
|
||||
not: Prisma.AnyNull,
|
||||
},
|
||||
},
|
||||
retrievalStatus: {
|
||||
in: ["COMPLETE", "ERROR"],
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -131,7 +126,7 @@ export const promptVariantsRouter = createTRPCRouter({
|
||||
const inputTokens = overallTokens._sum?.inputTokens ?? 0;
|
||||
const outputTokens = overallTokens._sum?.outputTokens ?? 0;
|
||||
|
||||
const awaitingCompletions = outputCount < scenarioCount;
|
||||
const awaitingCompletions = finishedCount < scenarioCount;
|
||||
|
||||
const awaitingEvals = !!evalResults.find(
|
||||
(result) => result.totalCount < scenarioCount * evals.length,
|
||||
@@ -143,7 +138,7 @@ export const promptVariantsRouter = createTRPCRouter({
|
||||
outputTokens,
|
||||
overallCost: overallTokens._sum?.cost ?? 0,
|
||||
scenarioCount,
|
||||
outputCount,
|
||||
finishedCount,
|
||||
awaitingCompletions,
|
||||
awaitingEvals,
|
||||
};
|
||||
|
||||
152
app/src/server/tasks/importDatasetEntries.task.ts
Normal file
@@ -0,0 +1,152 @@
|
||||
import { type DatasetFileUpload } from "@prisma/client";
|
||||
import { prisma } from "~/server/db";
|
||||
import defineTask from "./defineTask";
|
||||
import { downloadBlobToString } from "~/utils/azure/server";
|
||||
import {
|
||||
type TrainingRow,
|
||||
validateTrainingRows,
|
||||
parseJSONL,
|
||||
} from "~/components/datasets/validateTrainingRows";
|
||||
import { formatEntriesFromTrainingRows } from "~/server/utils/createEntriesFromTrainingRows";
|
||||
|
||||
export type ImportDatasetEntriesJob = {
|
||||
datasetFileUploadId: string;
|
||||
};
|
||||
|
||||
export const importDatasetEntries = defineTask<ImportDatasetEntriesJob>(
|
||||
"importDatasetEntries",
|
||||
async (task) => {
|
||||
const { datasetFileUploadId } = task;
|
||||
const datasetFileUpload = await prisma.datasetFileUpload.findUnique({
|
||||
where: { id: datasetFileUploadId },
|
||||
});
|
||||
if (!datasetFileUpload) {
|
||||
await prisma.datasetFileUpload.update({
|
||||
where: { id: datasetFileUploadId },
|
||||
data: {
|
||||
errorMessage: "Dataset File Upload not found",
|
||||
status: "ERROR",
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
await prisma.datasetFileUpload.update({
|
||||
where: { id: datasetFileUploadId },
|
||||
data: {
|
||||
status: "DOWNLOADING",
|
||||
progress: 5,
|
||||
},
|
||||
});
|
||||
|
||||
const onBlobDownloadProgress = async (progress: number) => {
|
||||
await prisma.datasetFileUpload.update({
|
||||
where: { id: datasetFileUploadId },
|
||||
data: {
|
||||
progress: 5 + Math.floor((progress / datasetFileUpload.fileSize) * 25),
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const jsonlStr = await downloadBlobToString(datasetFileUpload.blobName, onBlobDownloadProgress);
|
||||
|
||||
let trainingRows: TrainingRow[] = [];
|
||||
let validationError: string | null = null;
|
||||
try {
|
||||
trainingRows = parseJSONL(jsonlStr) as TrainingRow[];
|
||||
validationError = validateTrainingRows(trainingRows);
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} catch (e: any) {
|
||||
validationError = e.message;
|
||||
}
|
||||
|
||||
if (validationError) {
|
||||
await prisma.datasetFileUpload.update({
|
||||
where: { id: datasetFileUploadId },
|
||||
data: {
|
||||
errorMessage: `Invalid JSONL: ${validationError}`,
|
||||
status: "ERROR",
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
await prisma.datasetFileUpload.update({
|
||||
where: { id: datasetFileUploadId },
|
||||
data: {
|
||||
status: "PROCESSING",
|
||||
progress: 30,
|
||||
},
|
||||
});
|
||||
|
||||
const updatePromises: Promise<DatasetFileUpload>[] = [];
|
||||
|
||||
const updateCallback = async (progress: number) => {
|
||||
await prisma.datasetFileUpload.update({
|
||||
where: { id: datasetFileUploadId },
|
||||
data: {
|
||||
progress: 30 + Math.floor((progress / trainingRows.length) * 69),
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
let datasetEntriesToCreate;
|
||||
try {
|
||||
datasetEntriesToCreate = await formatEntriesFromTrainingRows(
|
||||
datasetFileUpload.datasetId,
|
||||
trainingRows,
|
||||
updateCallback,
|
||||
500,
|
||||
);
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} catch (e: any) {
|
||||
await prisma.datasetFileUpload.update({
|
||||
where: { id: datasetFileUploadId },
|
||||
data: {
|
||||
errorMessage: `Error formatting rows: ${e.message as string}`,
|
||||
status: "ERROR",
|
||||
visible: true,
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
await Promise.all(updatePromises);
|
||||
|
||||
await prisma.datasetFileUpload.update({
|
||||
where: { id: datasetFileUploadId },
|
||||
data: {
|
||||
status: "SAVING",
|
||||
progress: 99,
|
||||
},
|
||||
});
|
||||
|
||||
await prisma.datasetEntry.createMany({
|
||||
data: datasetEntriesToCreate,
|
||||
});
|
||||
|
||||
await prisma.datasetFileUpload.update({
|
||||
where: { id: datasetFileUploadId },
|
||||
data: {
|
||||
status: "COMPLETE",
|
||||
progress: 100,
|
||||
visible: true,
|
||||
},
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
export const queueImportDatasetEntries = async (datasetFileUploadId: string) => {
|
||||
await Promise.all([
|
||||
prisma.datasetFileUpload.update({
|
||||
where: {
|
||||
id: datasetFileUploadId,
|
||||
},
|
||||
data: {
|
||||
errorMessage: null,
|
||||
status: "PENDING",
|
||||
},
|
||||
}),
|
||||
|
||||
importDatasetEntries.enqueue({ datasetFileUploadId }),
|
||||
]);
|
||||
};
|
||||
@@ -5,10 +5,11 @@ import "../../../sentry.server.config";
|
||||
import { env } from "~/env.mjs";
|
||||
import { queryModel } from "./queryModel.task";
|
||||
import { runNewEval } from "./runNewEval.task";
|
||||
import { importDatasetEntries } from "./importDatasetEntries.task";
|
||||
|
||||
console.log("Starting worker");
|
||||
|
||||
const registeredTasks = [queryModel, runNewEval];
|
||||
const registeredTasks = [queryModel, runNewEval, importDatasetEntries];
|
||||
|
||||
const taskList = registeredTasks.reduce((acc, task) => {
|
||||
acc[task.task.identifier] = task.task.handler;
|
||||
|
||||
70
app/src/server/utils/createEntriesFromTrainingRows.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import { type Prisma } from "@prisma/client";
import { shuffle } from "lodash-es";
import {
  type CreateChatCompletionRequestMessage,
  type ChatCompletion,
} from "openai/resources/chat";

import { prisma } from "~/server/db";
import { type TrainingRow } from "~/components/datasets/validateTrainingRows";
import { countLlamaChatTokens } from "~/utils/countTokens";

export const formatEntriesFromTrainingRows = async (
  datasetId: string,
  trainingRows: TrainingRow[],
  updateCallback?: (progress: number) => Promise<void>,
  updateFrequency = 1000,
) => {
  const [dataset, existingTrainingCount, existingTestingCount] = await prisma.$transaction([
    prisma.dataset.findUnique({ where: { id: datasetId } }),
    prisma.datasetEntry.count({
      where: {
        datasetId,
        type: "TRAIN",
      },
    }),
    prisma.datasetEntry.count({
      where: {
        datasetId,
        type: "TEST",
      },
    }),
  ]);

  const trainingRatio = dataset?.trainingRatio ?? 0.8;

  const newTotalEntries = existingTrainingCount + existingTestingCount + trainingRows.length;
  const numTrainingToAdd = Math.floor(trainingRatio * newTotalEntries) - existingTrainingCount;
  const numTestingToAdd = trainingRows.length - numTrainingToAdd;
  const typesToAssign = shuffle([
    ...Array(numTrainingToAdd).fill("TRAIN"),
    ...Array(numTestingToAdd).fill("TEST"),
  ]);
  const datasetEntriesToCreate: Prisma.DatasetEntryCreateManyInput[] = [];
  let i = 0;
  for (const row of trainingRows) {
    // console.log(row);
    if (updateCallback && i % updateFrequency === 0) await updateCallback(i);
    let outputTokens = 0;
    if (row.output) {
      outputTokens = countLlamaChatTokens([row.output as unknown as ChatCompletion.Choice.Message]);
    }
    // console.log("outputTokens", outputTokens);
    datasetEntriesToCreate.push({
      datasetId: datasetId,
      input: row.input as unknown as Prisma.InputJsonValue,
      output: (row.output as unknown as Prisma.InputJsonValue) ?? {
        role: "assistant",
        content: "",
      },
      inputTokens: countLlamaChatTokens(
        row.input as unknown as CreateChatCompletionRequestMessage[],
      ),
      outputTokens,
      type: typesToAssign.pop() as "TRAIN" | "TEST",
    });
    i++;
  }

  return datasetEntriesToCreate;
};
33
app/src/state/selectedDatasetEntriesSlice.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { type SliceCreator } from "./store";

export type SelectedDatasetEntriesSlice = {
  selectedIds: Set<string>;
  toggleSelectedId: (id: string) => void;
  addSelectedIds: (ids: string[]) => void;
  clearSelectedIds: () => void;
};

export const createSelectedDatasetEntriesSlice: SliceCreator<SelectedDatasetEntriesSlice> = (
  set,
) => ({
  selectedIds: new Set(),
  toggleSelectedId: (id: string) =>
    set((state) => {
      if (state.selectedDatasetEntries.selectedIds.has(id)) {
        state.selectedDatasetEntries.selectedIds.delete(id);
      } else {
        state.selectedDatasetEntries.selectedIds.add(id);
      }
    }),
  addSelectedIds: (ids: string[]) =>
    set((state) => {
      state.selectedDatasetEntries.selectedIds = new Set([
        ...state.selectedDatasetEntries.selectedIds,
        ...ids,
      ]);
    }),
  clearSelectedIds: () =>
    set((state) => {
      state.selectedDatasetEntries.selectedIds = new Set();
    }),
});
@@ -7,7 +7,7 @@ export type SelectedLogsSlice = {
|
||||
clearSelectedLogIds: () => void;
|
||||
};
|
||||
|
||||
export const createSelectedLogsSlice: SliceCreator<SelectedLogsSlice> = (set, get) => ({
|
||||
export const createSelectedLogsSlice: SliceCreator<SelectedLogsSlice> = (set) => ({
|
||||
selectedLogIds: new Set(),
|
||||
toggleSelectedLogId: (id: string) =>
|
||||
set((state) => {
|
||||
|
||||
33
app/src/state/sharedArgumentsEditor.slice.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import loader, { type Monaco } from "@monaco-editor/loader";

import { type SliceCreator } from "./store";

export const editorBackground = "#fafafa";

export type SharedArgumentsEditorSlice = {
  monaco: null | Monaco;
  loadMonaco: () => Promise<void>;
};

export const createArgumentsEditorSlice: SliceCreator<SharedArgumentsEditorSlice> = (set, get) => ({
  monaco: loader.__getMonacoInstance(),
  loadMonaco: async () => {
    // We only want to run this client-side
    if (typeof window === "undefined") return;

    const monaco = await loader.init();

    monaco.editor.defineTheme("customTheme", {
      base: "vs",
      inherit: true,
      rules: [],
      colors: {
        "editor.background": "#ffffff",
      },
    });

    set((state) => {
      state.sharedArgumentsEditor.monaco = monaco;
    });
  },
});
@@ -7,9 +7,17 @@ import {
|
||||
type SharedVariantEditorSlice,
|
||||
createVariantEditorSlice,
|
||||
} from "./sharedVariantEditor.slice";
|
||||
import {
|
||||
type SharedArgumentsEditorSlice,
|
||||
createArgumentsEditorSlice,
|
||||
} from "./sharedArgumentsEditor.slice";
|
||||
import { type APIClient } from "~/utils/api";
|
||||
import { type PersistedState, persistOptions } from "./persist";
|
||||
import { type SelectedLogsSlice, createSelectedLogsSlice } from "./selectedLogsSlice";
|
||||
import {
|
||||
type SelectedDatasetEntriesSlice,
|
||||
createSelectedDatasetEntriesSlice,
|
||||
} from "./selectedDatasetEntriesSlice";
|
||||
import { type LogFiltersSlice, createLogFiltersSlice } from "./logFiltersSlice";
|
||||
import { type ColumnVisibilitySlice, createColumnVisibilitySlice } from "./columnVisiblitySlice";
|
||||
import { type FeatureFlagsSlice, createFeatureFlagsSlice } from "./featureFlags";
|
||||
@@ -18,15 +26,14 @@ enableMapSet();
|
||||
|
||||
export type State = {
|
||||
isRehydrated: boolean;
|
||||
drawerOpen: boolean;
|
||||
openDrawer: () => void;
|
||||
closeDrawer: () => void;
|
||||
api: APIClient | null;
|
||||
setApi: (api: APIClient) => void;
|
||||
sharedVariantEditor: SharedVariantEditorSlice;
|
||||
sharedArgumentsEditor: SharedArgumentsEditorSlice;
|
||||
selectedProjectId: string | null;
|
||||
setSelectedProjectId: (id: string) => void;
|
||||
selectedLogs: SelectedLogsSlice;
|
||||
selectedDatasetEntries: SelectedDatasetEntriesSlice;
|
||||
logFilters: LogFiltersSlice;
|
||||
columnVisibility: ColumnVisibilitySlice;
|
||||
featureFlags: FeatureFlagsSlice;
|
||||
@@ -46,22 +53,15 @@ const useBaseStore = create<State, [["zustand/persist", PersistedState], ["zusta
|
||||
set((state) => {
|
||||
state.api = api;
|
||||
}),
|
||||
drawerOpen: false,
|
||||
openDrawer: () =>
|
||||
set((state) => {
|
||||
state.drawerOpen = true;
|
||||
}),
|
||||
closeDrawer: () =>
|
||||
set((state) => {
|
||||
state.drawerOpen = false;
|
||||
}),
|
||||
sharedVariantEditor: createVariantEditorSlice(set, get, ...rest),
|
||||
sharedArgumentsEditor: createArgumentsEditorSlice(set, get, ...rest),
|
||||
selectedProjectId: null,
|
||||
setSelectedProjectId: (id: string) =>
|
||||
set((state) => {
|
||||
state.selectedProjectId = id;
|
||||
}),
|
||||
selectedLogs: createSelectedLogsSlice(set, get, ...rest),
|
||||
selectedDatasetEntries: createSelectedDatasetEntriesSlice(set, get, ...rest),
|
||||
logFilters: createLogFiltersSlice(set, get, ...rest),
|
||||
columnVisibility: createColumnVisibilitySlice(set, get, ...rest),
|
||||
featureFlags: createFeatureFlagsSlice(set, get, ...rest),
|
||||
|
||||
94
app/src/utils/azure/server.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import {
  BlobServiceClient,
  generateAccountSASQueryParameters,
  AccountSASPermissions,
  AccountSASServices,
  AccountSASResourceTypes,
  StorageSharedKeyCredential,
  SASProtocol,
} from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const accountName = process.env.AZURE_STORAGE_ACCOUNT_NAME;
if (!accountName) throw Error("Azure Storage accountName not found");
const accountKey = process.env.AZURE_STORAGE_ACCOUNT_KEY;
if (!accountKey) throw Error("Azure Storage accountKey not found");
const containerName = process.env.AZURE_STORAGE_CONTAINER_NAME;
if (!containerName) throw Error("Azure Storage containerName not found");

const sharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey);

const blobServiceClient = new BlobServiceClient(
  `https://${accountName}.blob.core.windows.net`,
  new DefaultAzureCredential(),
);

const containerClient = blobServiceClient.getContainerClient(containerName);

export const generateServiceClientUrl = () => {
  const sasOptions = {
    services: AccountSASServices.parse("b").toString(), // blobs
    resourceTypes: AccountSASResourceTypes.parse("sco").toString(), // service, container, object
    permissions: AccountSASPermissions.parse("w"), // write permissions
    protocol: SASProtocol.Https,
    startsOn: new Date(),
    expiresOn: new Date(new Date().valueOf() + 10 * 60 * 1000), // 10 minutes
  };
  let sasToken = generateAccountSASQueryParameters(sasOptions, sharedKeyCredential).toString();

  // remove leading "?"
  sasToken = sasToken[0] === "?" ? sasToken.substring(1) : sasToken;
  return {
    serviceClientUrl: `https://${accountName}.blob.core.windows.net?${sasToken}`,
    containerName,
  };
};

export async function downloadBlobToString(
  blobName: string,
  onProgress?: (progress: number) => Promise<void>,
  chunkInterval?: number,
) {
  const blobClient = containerClient.getBlobClient(blobName);

  const downloadResponse = await blobClient.download();

  if (!downloadResponse) throw Error("error downloading blob");
  if (!downloadResponse.readableStreamBody)
    throw Error("downloadResponse.readableStreamBody not found");

  const downloaded = await streamToBuffer(
    downloadResponse.readableStreamBody,
    onProgress,
    chunkInterval,
  );
  return downloaded.toString();
}

async function streamToBuffer(
  readableStream: NodeJS.ReadableStream,
  onProgress?: (progress: number) => Promise<void>,
  chunkInterval = 1048576, // send progress every 1MB
): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const chunks: Uint8Array[] = [];
    let bytesDownloaded = 0;
    let lastReportedByteCount = 0;

    readableStream.on("data", (data: ArrayBuffer) => {
      chunks.push(data instanceof Buffer ? data : Buffer.from(data));
      bytesDownloaded += data.byteLength;

      if (onProgress && bytesDownloaded - lastReportedByteCount >= chunkInterval) {
        void onProgress(bytesDownloaded); // progress in Bytes
        lastReportedByteCount = bytesDownloaded;
      }
    });

    readableStream.on("end", () => {
      resolve(Buffer.concat(chunks));
    });

    readableStream.on("error", reject);
  });
}
30
app/src/utils/azure/website.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import { BlobServiceClient } from "@azure/storage-blob";
import { v4 as uuidv4 } from "uuid";

import { useAppStore } from "~/state/store";

export const uploadDatasetEntryFile = async (file: File) => {
  const { selectedProjectId: projectId, api } = useAppStore.getState();
  if (!projectId) throw Error("projectId not found");
  if (!api) throw Error("api not initialized");
  const { serviceClientUrl, containerName } = await api.client.datasets.getServiceClientUrl.query({
    projectId,
  });

  const blobServiceClient = new BlobServiceClient(serviceClientUrl);
  // create container client
  const containerClient = blobServiceClient.getContainerClient(containerName);

  // base name without extension
  const basename = file.name.split("/").pop()?.split(".").shift();
  if (!basename) throw Error("basename not found");

  const blobName = `${basename}-${uuidv4()}.jsonl`;
  // create blob client
  const blobClient = containerClient.getBlockBlobClient(blobName);

  // upload file
  await blobClient.uploadData(file);

  return blobName;
};
@@ -1,5 +1,7 @@
|
||||
import { type ChatCompletion } from "openai/resources/chat";
|
||||
import { GPTTokens } from "gpt-tokens";
|
||||
import llamaTokenizer from "llama-tokenizer-js";
|
||||
|
||||
import { type SupportedModel } from "~/modelProviders/openai-ChatCompletion";
|
||||
|
||||
interface GPTTokensMessageItem {
|
||||
@@ -12,6 +14,21 @@ export const countOpenAIChatTokens = (
|
||||
model: SupportedModel,
|
||||
messages: ChatCompletion.Choice.Message[],
|
||||
) => {
|
||||
return new GPTTokens({ model, messages: messages as unknown as GPTTokensMessageItem[] })
|
||||
.usedTokens;
|
||||
const reformattedMessages = messages.map((message) => ({
|
||||
role: message.role,
|
||||
// Not completely accurate, but gives a rough idea of the token count
|
||||
content: message.content ?? JSON.stringify(message.function_call),
|
||||
}));
|
||||
return new GPTTokens({
|
||||
model,
|
||||
messages: reformattedMessages as unknown as GPTTokensMessageItem[],
|
||||
}).usedTokens;
|
||||
};
|
||||
|
||||
export const countLlamaChatTokens = (messages: ChatCompletion.Choice.Message[]) => {
|
||||
const stringToTokenize = messages
|
||||
.map((message) => message.content || JSON.stringify(message.function_call))
|
||||
.join("\n");
|
||||
const tokens = llamaTokenizer.encode(stringToTokenize);
|
||||
return tokens.length;
|
||||
};
|
||||
|
||||
@@ -148,6 +148,49 @@ export const useScenarioVars = () => {
|
||||
);
|
||||
};
|
||||
|
||||
export const useDatasets = () => {
|
||||
const selectedProjectId = useAppStore((state) => state.selectedProjectId);
|
||||
return api.datasets.list.useQuery(
|
||||
{ projectId: selectedProjectId ?? "" },
|
||||
{ enabled: !!selectedProjectId },
|
||||
);
|
||||
};
|
||||
|
||||
export const useDataset = () => {
|
||||
const router = useRouter();
|
||||
const dataset = api.datasets.get.useQuery(
|
||||
{ id: router.query.id as string },
|
||||
{ enabled: !!router.query.id },
|
||||
);
|
||||
|
||||
return dataset;
|
||||
};
|
||||
|
||||
export const useDatasetEntries = () => {
|
||||
const dataset = useDataset().data;
|
||||
const { page, pageSize } = usePageParams();
|
||||
|
||||
const { data, isLoading, ...rest } = api.datasetEntries.list.useQuery(
|
||||
{ datasetId: dataset?.id ?? "", page, pageSize },
|
||||
{ enabled: !!dataset?.id },
|
||||
);
|
||||
|
||||
const [stableData, setStableData] = useState(data);
|
||||
|
||||
useEffect(() => {
|
||||
// Prevent annoying flashes while logs are loading from the server
|
||||
if (!isLoading) {
|
||||
setStableData(data);
|
||||
}
|
||||
}, [data, isLoading]);
|
||||
|
||||
return { data: stableData, isLoading, ...rest };
|
||||
};
|
||||
|
||||
export const useDatasetEntry = (entryId: string | null) => {
|
||||
return api.datasetEntries.get.useQuery({ id: entryId as string }, { enabled: !!entryId });
|
||||
};
|
||||
|
||||
export const useLoggedCalls = (applyFilters = true) => {
|
||||
const selectedProjectId = useAppStore((state) => state.selectedProjectId);
|
||||
const { page, pageSize } = usePageParams();
|
||||
|
||||
@@ -10,3 +10,60 @@ export const lookupModel = (provider: string, model: string) => {
|
||||
|
||||
export const modelLabel = (provider: string, model: string) =>
|
||||
`${provider}/${lookupModel(provider, model)?.name ?? model}`;
|
||||
|
||||
// Check if the str could be parsed to a message function call
|
||||
export const parseableToFunctionCall = (str: string) => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let parsedJSON: any;
|
||||
try {
|
||||
parsedJSON = JSON.parse(str);
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if the parsedJSON is an object and not null
|
||||
if (typeof parsedJSON !== "object" || parsedJSON === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if only the keys "name" and "arguments" exist
|
||||
const keys = Object.keys(parsedJSON as Record<string, unknown>);
|
||||
if (keys.length !== 2 || !keys.includes("name") || !keys.includes("arguments")) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if both "name" and "arguments" are of type string
|
||||
if (typeof parsedJSON.name !== "string" || typeof parsedJSON.arguments !== "string") {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if the "arguments" value is parseable to an object
|
||||
let parsedArguments: unknown;
|
||||
try {
|
||||
parsedArguments = JSON.parse(parsedJSON["arguments"]);
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if parsedArguments is an object and not null
|
||||
if (typeof parsedArguments !== "object" || parsedArguments === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
export const formatFileSize = (bytes: number, decimals = 2) => {
|
||||
if (bytes === 0) return "0 Bytes";
|
||||
|
||||
const k = 1024;
|
||||
const dm = decimals < 0 ? 0 : decimals;
|
||||
const sizes = ["Bytes", "KB", "MB", "GB", "TB"];
|
||||
|
||||
for (const size of sizes) {
|
||||
if (bytes < k) return `${parseFloat(bytes.toFixed(dm))} ${size}`;
|
||||
bytes /= k;
|
||||
}
|
||||
|
||||
return "> 1024 TB";
|
||||
};
|
||||
|
||||
@@ -19,7 +19,9 @@
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"~/*": ["./src/*"]
|
||||
}
|
||||
},
|
||||
"typeRoots": ["./types", "./node_modules/@types"],
|
||||
"types": ["llama-tokenizer-js", "node"]
|
||||
},
|
||||
"include": [
|
||||
".eslintrc.cjs",
|
||||
|
||||
4
app/types/llama-tokenizer-js/index.d.ts
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
declare module "llama-tokenizer-js" {
  export function encode(input: string): number[];
  export function decode(input: number[]): string;
}
360
pnpm-lock.yaml
generated
@@ -14,6 +14,12 @@ importers:
|
||||
'@apidevtools/json-schema-ref-parser':
|
||||
specifier: ^10.1.0
|
||||
version: 10.1.0
|
||||
'@azure/identity':
|
||||
specifier: ^3.3.0
|
||||
version: 3.3.0
|
||||
'@azure/storage-blob':
|
||||
specifier: 12.15.0
|
||||
version: 12.15.0
|
||||
'@babel/standalone':
|
||||
specifier: ^7.22.9
|
||||
version: 7.22.9
|
||||
@@ -143,6 +149,9 @@ importers:
|
||||
kysely-codegen:
|
||||
specifier: ^0.10.1
|
||||
version: 0.10.1(kysely@0.26.1)(pg@8.11.2)
|
||||
llama-tokenizer-js:
|
||||
specifier: ^1.1.3
|
||||
version: 1.1.3
|
||||
lodash-es:
|
||||
specifier: ^4.17.21
|
||||
version: 4.17.21
|
||||
@@ -465,6 +474,184 @@ packages:
|
||||
js-yaml: 4.1.0
|
||||
dev: true
|
||||
|
||||
/@azure/abort-controller@1.1.0:
|
||||
resolution: {integrity: sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==}
|
||||
engines: {node: '>=12.0.0'}
|
||||
dependencies:
|
||||
tslib: 2.6.1
|
||||
dev: false
|
||||
|
||||
/@azure/core-auth@1.5.0:
|
||||
resolution: {integrity: sha512-udzoBuYG1VBoHVohDTrvKjyzel34zt77Bhp7dQntVGGD0ehVq48owENbBG8fIgkHRNUBQH5k1r0hpoMu5L8+kw==}
|
||||
engines: {node: '>=14.0.0'}
|
||||
dependencies:
|
||||
'@azure/abort-controller': 1.1.0
|
||||
'@azure/core-util': 1.4.0
|
||||
tslib: 2.6.1
|
||||
dev: false
|
||||
|
||||
/@azure/core-client@1.7.3:
|
||||
resolution: {integrity: sha512-kleJ1iUTxcO32Y06dH9Pfi9K4U+Tlb111WXEnbt7R/ne+NLRwppZiTGJuTD5VVoxTMK5NTbEtm5t2vcdNCFe2g==}
|
||||
engines: {node: '>=14.0.0'}
|
||||
dependencies:
|
||||
'@azure/abort-controller': 1.1.0
|
||||
'@azure/core-auth': 1.5.0
|
||||
'@azure/core-rest-pipeline': 1.12.0
|
||||
'@azure/core-tracing': 1.0.1
|
||||
'@azure/core-util': 1.4.0
|
||||
'@azure/logger': 1.0.4
|
||||
tslib: 2.6.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
dev: false
|
||||
|
||||
/@azure/core-http@3.0.3:
|
||||
resolution: {integrity: sha512-QMib3wXotJMFhHgmJBPUF9YsyErw34H0XDFQd9CauH7TPB+RGcyl9Ayy7iURtJB04ngXhE6YwrQsWDXlSLrilg==}
|
||||
engines: {node: '>=14.0.0'}
|
||||
dependencies:
|
||||
'@azure/abort-controller': 1.1.0
|
||||
'@azure/core-auth': 1.5.0
|
||||
'@azure/core-tracing': 1.0.0-preview.13
|
||||
'@azure/core-util': 1.4.0
|
||||
'@azure/logger': 1.0.4
|
||||
'@types/node-fetch': 2.6.4
|
||||
'@types/tunnel': 0.0.3
|
||||
form-data: 4.0.0
|
||||
node-fetch: 2.6.12(encoding@0.1.13)
|
||||
process: 0.11.10
|
||||
tslib: 2.6.1
|
||||
tunnel: 0.0.6
|
||||
uuid: 8.3.2
|
||||
xml2js: 0.5.0
|
||||
transitivePeerDependencies:
|
||||
- encoding
|
||||
dev: false
|
||||
|
||||
/@azure/core-lro@2.5.4:
|
||||
resolution: {integrity: sha512-3GJiMVH7/10bulzOKGrrLeG/uCBH/9VtxqaMcB9lIqAeamI/xYQSHJL/KcsLDuH+yTjYpro/u6D/MuRe4dN70Q==}
|
||||
engines: {node: '>=14.0.0'}
|
||||
dependencies:
|
||||
'@azure/abort-controller': 1.1.0
|
||||
'@azure/core-util': 1.4.0
|
||||
'@azure/logger': 1.0.4
|
||||
tslib: 2.6.1
|
||||
dev: false
|
||||
|
||||
/@azure/core-paging@1.5.0:
|
||||
resolution: {integrity: sha512-zqWdVIt+2Z+3wqxEOGzR5hXFZ8MGKK52x4vFLw8n58pR6ZfKRx3EXYTxTaYxYHc/PexPUTyimcTWFJbji9Z6Iw==}
|
||||
engines: {node: '>=14.0.0'}
|
||||
dependencies:
|
||||
tslib: 2.6.1
|
||||
dev: false
|
||||
|
||||
/@azure/core-rest-pipeline@1.12.0:
|
||||
resolution: {integrity: sha512-+MnSB0vGZjszSzr5AW8z93/9fkDu2RLtWmAN8gskURq7EW2sSwqy8jZa0V26rjuBVkwhdA3Hw8z3VWoeBUOw+A==}
|
||||
engines: {node: '>=14.0.0'}
|
||||
dependencies:
|
||||
'@azure/abort-controller': 1.1.0
|
||||
'@azure/core-auth': 1.5.0
|
||||
'@azure/core-tracing': 1.0.1
|
||||
'@azure/core-util': 1.4.0
|
||||
'@azure/logger': 1.0.4
|
||||
form-data: 4.0.0
|
||||
http-proxy-agent: 5.0.0
|
||||
https-proxy-agent: 5.0.1
|
||||
tslib: 2.6.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
dev: false
|
||||
|
||||
/@azure/core-tracing@1.0.0-preview.13:
|
||||
resolution: {integrity: sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==}
|
||||
engines: {node: '>=12.0.0'}
|
||||
dependencies:
|
||||
'@opentelemetry/api': 1.4.1
|
||||
tslib: 2.6.1
|
||||
dev: false
|
||||
|
||||
/@azure/core-tracing@1.0.1:
|
||||
resolution: {integrity: sha512-I5CGMoLtX+pI17ZdiFJZgxMJApsK6jjfm85hpgp3oazCdq5Wxgh4wMr7ge/TTWW1B5WBuvIOI1fMU/FrOAMKrw==}
|
||||
engines: {node: '>=12.0.0'}
|
||||
dependencies:
|
||||
tslib: 2.6.1
|
||||
dev: false
|
||||
|
||||
/@azure/core-util@1.4.0:
|
||||
resolution: {integrity: sha512-eGAyJpm3skVQoLiRqm/xPa+SXi/NPDdSHMxbRAz2lSprd+Zs+qrpQGQQ2VQ3Nttu+nSZR4XoYQC71LbEI7jsig==}
|
||||
engines: {node: '>=14.0.0'}
|
||||
dependencies:
|
||||
'@azure/abort-controller': 1.1.0
|
||||
tslib: 2.6.1
|
||||
dev: false
|
||||
|
||||
/@azure/identity@3.3.0:
|
||||
resolution: {integrity: sha512-gISa/dAAxrWt6F2WiDXZY0y2xY4MLlN2wkNW4cPuq5OgPQKLSkxLc4I2WR04puTfZyQZnpXbAapAMEj1b96fgg==}
|
||||
engines: {node: '>=14.0.0'}
|
||||
dependencies:
|
||||
'@azure/abort-controller': 1.1.0
|
||||
'@azure/core-auth': 1.5.0
|
||||
'@azure/core-client': 1.7.3
|
||||
'@azure/core-rest-pipeline': 1.12.0
|
||||
'@azure/core-tracing': 1.0.1
|
||||
'@azure/core-util': 1.4.0
|
||||
'@azure/logger': 1.0.4
|
||||
'@azure/msal-browser': 2.38.2
|
||||
'@azure/msal-common': 13.3.0
|
||||
'@azure/msal-node': 1.18.3
|
||||
events: 3.3.0
|
||||
jws: 4.0.0
|
||||
open: 8.4.2
|
||||
stoppable: 1.1.0
|
||||
tslib: 2.6.1
|
||||
uuid: 8.3.2
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
dev: false
|
||||
|
||||
/@azure/logger@1.0.4:
|
||||
resolution: {integrity: sha512-ustrPY8MryhloQj7OWGe+HrYx+aoiOxzbXTtgblbV3xwCqpzUK36phH3XNHQKj3EPonyFUuDTfR3qFhTEAuZEg==}
|
||||
engines: {node: '>=14.0.0'}
|
||||
dependencies:
|
||||
tslib: 2.6.1
|
||||
dev: false
|
||||
|
||||
/@azure/msal-browser@2.38.2:
|
||||
resolution: {integrity: sha512-71BeIn2we6LIgMplwCSaMq5zAwmalyJR3jFcVOZxNVfQ1saBRwOD+P77nLs5vrRCedVKTq8RMFhIOdpMLNno0A==}
|
||||
engines: {node: '>=0.8.0'}
|
||||
dependencies:
|
||||
'@azure/msal-common': 13.3.0
|
||||
dev: false
|
||||
|
||||
/@azure/msal-common@13.3.0:
|
||||
resolution: {integrity: sha512-/VFWTicjcJbrGp3yQP7A24xU95NiDMe23vxIU1U6qdRPFsprMDNUohMudclnd+WSHE4/McqkZs/nUU3sAKkVjg==}
|
||||
engines: {node: '>=0.8.0'}
|
||||
dev: false
|
||||
|
||||
/@azure/msal-node@1.18.3:
|
||||
resolution: {integrity: sha512-lI1OsxNbS/gxRD4548Wyj22Dk8kS7eGMwD9GlBZvQmFV8FJUXoXySL1BiNzDsHUE96/DS/DHmA+F73p1Dkcktg==}
|
||||
engines: {node: 10 || 12 || 14 || 16 || 18}
|
||||
dependencies:
|
||||
'@azure/msal-common': 13.3.0
|
||||
jsonwebtoken: 9.0.2
|
||||
uuid: 8.3.2
|
||||
dev: false
|
||||
|
||||
/@azure/storage-blob@12.15.0:
|
||||
resolution: {integrity: sha512-e7JBKLOFi0QVJqqLzrjx1eL3je3/Ug2IQj24cTM9b85CsnnFjLGeGjJVIjbGGZaytewiCEG7r3lRwQX7fKj0/w==}
|
||||
engines: {node: '>=14.0.0'}
|
||||
dependencies:
|
||||
'@azure/abort-controller': 1.1.0
|
||||
'@azure/core-http': 3.0.3
|
||||
'@azure/core-lro': 2.5.4
|
||||
'@azure/core-paging': 1.5.0
|
||||
'@azure/core-tracing': 1.0.0-preview.13
|
||||
'@azure/logger': 1.0.4
|
||||
events: 3.3.0
|
||||
tslib: 2.6.1
|
||||
transitivePeerDependencies:
|
||||
- encoding
|
||||
dev: false
|
||||
|
||||
/@babel/code-frame@7.22.10:
|
||||
resolution: {integrity: sha512-/KKIMG4UEL35WmI9OlvMhurwtytjvXoFcGNrOvyG9zIzA8YmPjVtIZUf7b05+TPO7G7/GEmLHDaoCgACHl9hhA==}
|
||||
engines: {node: '>=6.9.0'}
|
||||
@@ -2602,6 +2789,11 @@ packages:
|
||||
openapi-typescript: 5.4.1
|
||||
dev: true
|
||||
|
||||
/@opentelemetry/api@1.4.1:
|
||||
resolution: {integrity: sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA==}
|
||||
engines: {node: '>=8.0.0'}
|
||||
dev: false
|
||||
|
||||
/@panva/hkdf@1.1.1:
|
||||
resolution: {integrity: sha512-dhPeilub1NuIG0X5Kvhh9lH4iW3ZsHlnzwgwbOlgwQ2wG1IqFzsgHqmKPk3WzsdWAeaxKJxgM0+W433RmN45GA==}
|
||||
dev: false
|
||||
@@ -2916,6 +3108,11 @@ packages:
|
||||
use-sync-external-store: 1.2.0(react@18.2.0)
|
||||
dev: false
|
||||
|
||||
/@tootallnate/once@2.0.0:
|
||||
resolution: {integrity: sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==}
|
||||
engines: {node: '>= 10'}
|
||||
dev: false
|
||||
|
||||
/@trpc/client@10.26.0(@trpc/server@10.26.0):
|
||||
resolution: {integrity: sha512-ojHxQFIE97rBEGPK8p1ijbzo0T1IdEBoJ9fFSgWWL9FMuEEA/DNQ9s0uuiOrDKhCCdTFT1unfRharoJhB2/O2w==}
|
||||
peerDependencies:
|
||||
@@ -3333,6 +3530,12 @@ packages:
|
||||
resolution: {integrity: sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw==}
|
||||
dev: false
|
||||
|
||||
/@types/tunnel@0.0.3:
|
||||
resolution: {integrity: sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==}
|
||||
dependencies:
|
||||
'@types/node': 20.4.10
|
||||
dev: false
|
||||
|
||||
/@types/unist@2.0.7:
|
||||
resolution: {integrity: sha512-cputDpIbFgLUaGQn6Vqg3/YsJwxUwHLO13v3i5ouxT4lat0khip9AEWxtERujXV9wxIB1EyF97BSJFt6vpdI8g==}
|
||||
dev: false
|
||||
@@ -4102,6 +4305,10 @@ packages:
|
||||
resolution: {integrity: sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==}
|
||||
dev: false
|
||||
|
||||
/buffer-equal-constant-time@1.0.1:
|
||||
resolution: {integrity: sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==}
|
||||
dev: false
|
||||
|
||||
/buffer-from@0.1.2:
|
||||
resolution: {integrity: sha512-RiWIenusJsmI2KcvqQABB83tLxCByE3upSP8QU3rJDMVFGPWLvPQJt/O1Su9moRWeH7d+Q2HYb68f6+v+tw2vg==}
|
||||
dev: false
|
||||
@@ -4707,6 +4914,11 @@ packages:
|
||||
resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==}
|
||||
dev: true
|
||||
|
||||
/define-lazy-prop@2.0.0:
|
||||
resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==}
|
||||
engines: {node: '>=8'}
|
||||
dev: false
|
||||
|
||||
/define-properties@1.2.0:
|
||||
resolution: {integrity: sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==}
|
||||
engines: {node: '>= 0.4'}
|
||||
@@ -4818,6 +5030,12 @@ packages:
|
||||
safer-buffer: 2.1.2
|
||||
dev: false
|
||||
|
||||
/ecdsa-sig-formatter@1.0.11:
|
||||
resolution: {integrity: sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==}
|
||||
dependencies:
|
||||
safe-buffer: 5.2.1
|
||||
dev: false
|
||||
|
||||
/ee-first@1.1.1:
|
||||
resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==}
|
||||
dev: false
|
||||
@@ -6061,6 +6279,17 @@ packages:
|
||||
toidentifier: 1.0.1
|
||||
dev: false
|
||||
|
||||
/http-proxy-agent@5.0.0:
|
||||
resolution: {integrity: sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==}
|
||||
engines: {node: '>= 6'}
|
||||
dependencies:
|
||||
'@tootallnate/once': 2.0.0
|
||||
agent-base: 6.0.2
|
||||
debug: 4.3.4
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
dev: false
|
||||
|
||||
/http-signature@1.2.0:
|
||||
resolution: {integrity: sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==}
|
||||
engines: {node: '>=0.8', npm: '>=1.3.7'}
|
||||
@@ -6256,6 +6485,12 @@ packages:
|
||||
resolution: {integrity: sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==}
|
||||
dev: false
|
||||
|
||||
/is-docker@2.2.1:
|
||||
resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==}
|
||||
engines: {node: '>=8'}
|
||||
hasBin: true
|
||||
dev: false
|
||||
|
||||
/is-extglob@2.1.1:
|
||||
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
@@ -6370,6 +6605,13 @@ packages:
|
||||
engines: {node: '>=12.13'}
|
||||
dev: false
|
||||
|
||||
/is-wsl@2.2.0:
|
||||
resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==}
|
||||
engines: {node: '>=8'}
|
||||
dependencies:
|
||||
is-docker: 2.2.1
|
||||
dev: false
|
||||
|
||||
/isarray@0.0.1:
|
||||
resolution: {integrity: sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==}
|
||||
dev: false
|
||||
@@ -6399,7 +6641,7 @@ packages:
|
||||
resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==}
|
||||
engines: {node: '>= 10.13.0'}
|
||||
dependencies:
|
||||
'@types/node': 20.4.10
|
||||
'@types/node': 18.16.0
|
||||
merge-stream: 2.0.0
|
||||
supports-color: 8.1.1
|
||||
|
||||
@@ -6514,6 +6756,22 @@ packages:
|
||||
resolution: {integrity: sha512-S6cATIPVv1z0IlxdN+zUk5EPjkGCdnhN4wVSBlvoUO1tOLJootbo9CquNJmbIh4yikWHiUedhRYrNPn1arpEmQ==}
|
||||
dev: false
|
||||
|
||||
/jsonwebtoken@9.0.2:
|
||||
resolution: {integrity: sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==}
|
||||
engines: {node: '>=12', npm: '>=6'}
|
||||
dependencies:
|
||||
jws: 3.2.2
|
||||
lodash.includes: 4.3.0
|
||||
lodash.isboolean: 3.0.3
|
||||
lodash.isinteger: 4.0.4
|
||||
lodash.isnumber: 3.0.3
|
||||
lodash.isplainobject: 4.0.6
|
||||
lodash.isstring: 4.0.1
|
||||
lodash.once: 4.1.1
|
||||
ms: 2.1.3
|
||||
semver: 7.5.4
|
||||
dev: false
|
||||
|
||||
/jsprim@1.4.2:
|
||||
resolution: {integrity: sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==}
|
||||
engines: {node: '>=0.6.0'}
|
||||
@@ -6534,6 +6792,36 @@ packages:
|
||||
object.values: 1.1.6
|
||||
dev: true
|
||||
|
||||
/jwa@1.4.1:
|
||||
resolution: {integrity: sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==}
|
||||
dependencies:
|
||||
buffer-equal-constant-time: 1.0.1
|
||||
ecdsa-sig-formatter: 1.0.11
|
||||
safe-buffer: 5.2.1
|
||||
dev: false
|
||||
|
||||
/jwa@2.0.0:
|
||||
resolution: {integrity: sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==}
|
||||
dependencies:
|
||||
buffer-equal-constant-time: 1.0.1
|
||||
ecdsa-sig-formatter: 1.0.11
|
||||
safe-buffer: 5.2.1
|
||||
dev: false
|
||||
|
||||
/jws@3.2.2:
|
||||
resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==}
|
||||
dependencies:
|
||||
jwa: 1.4.1
|
||||
safe-buffer: 5.2.1
|
||||
dev: false
|
||||
|
||||
/jws@4.0.0:
|
||||
resolution: {integrity: sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==}
|
||||
dependencies:
|
||||
jwa: 2.0.0
|
||||
safe-buffer: 5.2.1
|
||||
dev: false
|
||||
|
||||
/kysely-codegen@0.10.1(kysely@0.26.1)(pg@8.11.2):
|
||||
resolution: {integrity: sha512-8Bslh952gN5gtucRv4jTZDFD18RBioS6M50zHfe5kwb5iSyEAunU4ZYMdHzkHraa4zxjg5/183XlOryBCXLRIw==}
|
||||
hasBin: true
|
||||
@@ -6605,6 +6893,10 @@ packages:
|
||||
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
|
||||
dev: false
|
||||
|
||||
/llama-tokenizer-js@1.1.3:
|
||||
resolution: {integrity: sha512-+BUgsLCXVQJkjiD/t7PdESLn+yXJIRX/BJfwzVVYfKZ9aN3gsP9xoadBZxKnCxGz2Slby+S7x41gUr2TKNaS4Q==}
|
||||
dev: false
|
||||
|
||||
/loader-runner@4.3.0:
|
||||
resolution: {integrity: sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==}
|
||||
engines: {node: '>=6.11.5'}
|
||||
@@ -6660,10 +6952,30 @@ packages:
|
||||
resolution: {integrity: sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==}
|
||||
dev: false
|
||||
|
||||
/lodash.includes@4.3.0:
|
||||
resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==}
|
||||
dev: false
|
||||
|
||||
/lodash.isboolean@3.0.3:
|
||||
resolution: {integrity: sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==}
|
||||
dev: false
|
||||
|
||||
/lodash.isinteger@4.0.4:
|
||||
resolution: {integrity: sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==}
|
||||
dev: false
|
||||
|
||||
/lodash.isnumber@3.0.3:
|
||||
resolution: {integrity: sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==}
|
||||
dev: false
|
||||
|
||||
/lodash.isplainobject@4.0.6:
|
||||
resolution: {integrity: sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==}
|
||||
dev: false
|
||||
|
||||
/lodash.isstring@4.0.1:
|
||||
resolution: {integrity: sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==}
|
||||
dev: false
|
||||
|
||||
/lodash.merge@4.6.2:
|
||||
resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==}
|
||||
dev: true
|
||||
@@ -6672,6 +6984,10 @@ packages:
|
||||
resolution: {integrity: sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ==}
|
||||
dev: false
|
||||
|
||||
/lodash.once@4.1.1:
|
||||
resolution: {integrity: sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==}
|
||||
dev: false
|
||||
|
||||
/lodash.union@4.6.0:
|
||||
resolution: {integrity: sha512-c4pB2CdGrGdjMKYLA+XiRDO7Y0PRQbm/Gzg8qMj+QH+pFVAoTp5sBpO0odL3FjoPCGjK96p6qsP+yQoiLoOBcw==}
|
||||
dev: false
|
||||
@@ -7177,6 +7493,15 @@ packages:
|
||||
dependencies:
|
||||
wrappy: 1.0.2
|
||||
|
||||
/open@8.4.2:
|
||||
resolution: {integrity: sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==}
|
||||
engines: {node: '>=12'}
|
||||
dependencies:
|
||||
define-lazy-prop: 2.0.0
|
||||
is-docker: 2.2.1
|
||||
is-wsl: 2.2.0
|
||||
dev: false
|
||||
|
||||
/openai@3.3.0:
|
||||
resolution: {integrity: sha512-uqxI/Au+aPRnsaQRe8CojU0eCR7I0mBiKjD3sNMzY6DaC1ZVrc85u98mtJW6voDug8fgGN+DIZmTDxTthxb7dQ==}
|
||||
dependencies:
|
||||
@@ -7627,6 +7952,11 @@ packages:
|
||||
resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==}
|
||||
dev: false
|
||||
|
||||
/process@0.11.10:
|
||||
resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==}
|
||||
engines: {node: '>= 0.6.0'}
|
||||
dev: false
|
||||
|
||||
/progress@2.0.3:
|
||||
resolution: {integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==}
|
||||
engines: {node: '>=0.4.0'}
|
||||
@@ -8272,6 +8602,10 @@ packages:
|
||||
yoga-wasm-web: 0.3.3
|
||||
dev: false
|
||||
|
||||
/sax@1.2.4:
|
||||
resolution: {integrity: sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==}
|
||||
dev: false
|
||||
|
||||
/scheduler@0.23.0:
|
||||
resolution: {integrity: sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==}
|
||||
dependencies:
|
||||
@@ -8296,7 +8630,6 @@ packages:
|
||||
hasBin: true
|
||||
dependencies:
|
||||
lru-cache: 6.0.0
|
||||
dev: true
|
||||
|
||||
/send@0.18.0:
|
||||
resolution: {integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==}
|
||||
@@ -8504,6 +8837,11 @@ packages:
|
||||
resolution: {integrity: sha512-Rz6yejtVyWnVjC1RFvNmYL10kgjC49EOghxWn0RFqlCHGFpQx+Xe7yW3I4ceK1SGrWIGMjD5Kbue8W/udkbMJg==}
|
||||
dev: true
|
||||
|
||||
/stoppable@1.1.0:
|
||||
resolution: {integrity: sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==}
|
||||
engines: {node: '>=4', npm: '>=6'}
|
||||
dev: false
|
||||
|
||||
/stream-buffers@3.0.2:
|
||||
resolution: {integrity: sha512-DQi1h8VEBA/lURbSwFtEHnSTb9s2/pwLEaFuNhXwy1Dx3Sa0lOuYT2yNUr4/j2fs8oCAMANtrZ5OrPZtyVs3MQ==}
|
||||
engines: {node: '>= 0.10.0'}
|
||||
@@ -8876,6 +9214,11 @@ packages:
|
||||
safe-buffer: 5.2.1
|
||||
dev: false
|
||||
|
||||
/tunnel@0.0.6:
|
||||
resolution: {integrity: sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==}
|
||||
engines: {node: '>=0.6.11 <=0.7.0 || >=0.7.3'}
|
||||
dev: false
|
||||
|
||||
/tweetnacl@0.14.5:
|
||||
resolution: {integrity: sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==}
|
||||
dev: false
|
||||
@@ -9464,6 +9807,19 @@ packages:
|
||||
optional: true
|
||||
dev: false
|
||||
|
||||
/xml2js@0.5.0:
|
||||
resolution: {integrity: sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==}
|
||||
engines: {node: '>=4.0.0'}
|
||||
dependencies:
|
||||
sax: 1.2.4
|
||||
xmlbuilder: 11.0.1
|
||||
dev: false
|
||||
|
||||
/xmlbuilder@11.0.1:
|
||||
resolution: {integrity: sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==}
|
||||
engines: {node: '>=4.0'}
|
||||
dev: false
|
||||
|
||||
/xmlhttprequest-ssl@2.0.0:
|
||||
resolution: {integrity: sha512-QKxVRxiRACQcVuQEYFsI1hhkrMlrXHPegbbd1yn9UHOmRxY+si12nQYzri3vbzt8VdTTRviqcKxcyllFas5z2A==}
|
||||
engines: {node: '>=0.4.0'}
|
||||