diff --git a/app/src/components/datasets/FileUploadCard.tsx b/app/src/components/datasets/FileUploadCard.tsx
deleted file mode 100644
index 1fd5046..0000000
--- a/app/src/components/datasets/FileUploadCard.tsx
+++ /dev/null
@@ -1,68 +0,0 @@
-import { VStack, HStack, Button, Text, Card, Progress, IconButton } from "@chakra-ui/react";
-import { BsX } from "react-icons/bs";
-
-import { type RouterOutputs, api } from "~/utils/api";
-import { useHandledAsyncCallback } from "~/utils/hooks";
-import { formatFileSize } from "~/utils/utils";
-
-type FileUpload = RouterOutputs["datasets"]["listFileUploads"][0];
-
-const FileUploadCard = ({ fileUpload }: { fileUpload: FileUpload }) => {
- const { id, fileName, fileSize, progress, status, errorMessage } = fileUpload;
-
- const utils = api.useContext();
-
- const hideFileUploadMutation = api.datasets.hideFileUpload.useMutation();
- const [hideFileUpload, hidingInProgress] = useHandledAsyncCallback(async () => {
- await hideFileUploadMutation.mutateAsync({ fileUploadId: id });
- await utils.datasets.listFileUploads.invalidate();
- }, [id, hideFileUploadMutation, utils]);
-
- const [refreshDatasetEntries] = useHandledAsyncCallback(async () => {
- await utils.datasetEntries.list.invalidate();
- }, [utils]);
-
- return (
-   <Card w="full">
-     <VStack w="full" alignItems="flex-start" p={4}>
-       <HStack w="full" justifyContent="space-between">
-         <Text fontWeight="bold">
-           Uploading {fileName} ({formatFileSize(fileSize, 2)})
-         </Text>
-         <HStack spacing={2}>
-           {status === "COMPLETE" && (
-             <Button size="xs" onClick={refreshDatasetEntries}>
-               Load Entries
-             </Button>
-           )}
-           <IconButton
-             aria-label="Hide upload"
-             icon={<BsX />}
-             size="xs"
-             variant="ghost"
-             isLoading={hidingInProgress}
-             onClick={hideFileUpload}
-           />
-         </HStack>
-       </HStack>
-       {errorMessage ? (
-         <Text color="red.500">{errorMessage}</Text>
-       ) : (
-         <>
-           <Text>
-             {status} ({progress}%)
-           </Text>
-           <Progress w="full" value={progress} borderRadius={2} />
-         </>
-       )}
-     </VStack>
-   </Card>
- );
-};
-
-export default FileUploadCard;
diff --git a/app/src/components/datasets/FileUploadsCard.tsx b/app/src/components/datasets/FileUploadsCard.tsx
new file mode 100644
index 0000000..9dcef47
--- /dev/null
+++ b/app/src/components/datasets/FileUploadsCard.tsx
@@ -0,0 +1,139 @@
+import { useState, useEffect } from "react";
+import {
+ VStack,
+ HStack,
+ Button,
+ Text,
+ Progress,
+ IconButton,
+ Portal,
+ Spinner,
+} from "@chakra-ui/react";
+import { BsX } from "react-icons/bs";
+
+import { type RouterOutputs, api } from "~/utils/api";
+import { useDataset, useHandledAsyncCallback } from "~/utils/hooks";
+import { formatFileSize } from "~/utils/utils";
+
+type FileUpload = RouterOutputs["datasets"]["listFileUploads"][0];
+
+const FileUploadsCard = () => {
+ const dataset = useDataset();
+ const [fileUploadsRefetchInterval, setFileUploadsRefetchInterval] = useState(500);
+ const fileUploads = api.datasets.listFileUploads.useQuery(
+ { datasetId: dataset.data?.id as string },
+ { enabled: !!dataset.data?.id, refetchInterval: fileUploadsRefetchInterval },
+ );
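+ // Poll every 500ms while any upload is still processing; back off to 15s once all are COMPLETE or ERROR.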
+ useEffect(() => {
+ if (fileUploads?.data?.some((fu) => fu.status !== "COMPLETE" && fu.status !== "ERROR")) {
+ setFileUploadsRefetchInterval(500);
+ } else {
+ setFileUploadsRefetchInterval(15000);
+ }
+ }, [fileUploads]);
+
+ const utils = api.useContext();
+
+ const hideFileUploadsMutation = api.datasets.hideFileUploads.useMutation();
+ const [hideAllFileUploads, hidingInProgress] = useHandledAsyncCallback(async () => {
+ if (!fileUploads.data?.length) return;
+ await hideFileUploadsMutation.mutateAsync({
+ fileUploadIds: fileUploads.data.map((upload) => upload.id),
+ });
+ await utils.datasets.listFileUploads.invalidate();
+ }, [hideFileUploadsMutation, fileUploads.data, utils]);
+
+ if (!fileUploads.data?.length) return null;
+
+ return (
+   <Portal>
+     <VStack position="fixed" bottom={8} right={8} w={96} spacing={0} bgColor="white" borderRadius={8} borderWidth={1} overflow="hidden">
+       <HStack w="full" justifyContent="space-between" p={4} bgColor="gray.100">
+         <HStack spacing={2}>
+           <Text fontWeight="bold">Uploads</Text>
+           {fileUploads.data?.some((fu) => fu.status !== "COMPLETE" && fu.status !== "ERROR") && <Spinner size="sm" />}
+         </HStack>
+         <Button size="xs" variant="ghost" isLoading={hidingInProgress} onClick={hideAllFileUploads}>
+           Clear all
+         </Button>
+       </HStack>
+       {fileUploads?.data?.map((upload) => (
+         <FileUploadRow key={upload.id} fileUpload={upload} />
+       ))}
+     </VStack>
+   </Portal>
+ );
+};
+
+export default FileUploadsCard;
+
+const FileUploadRow = ({ fileUpload }: { fileUpload: FileUpload }) => {
+ const { id, fileName, fileSize, progress, status, errorMessage } = fileUpload;
+
+ const utils = api.useContext();
+
+ const hideFileUploadsMutation = api.datasets.hideFileUploads.useMutation();
+ const [hideFileUpload, hidingInProgress] = useHandledAsyncCallback(async () => {
+ await hideFileUploadsMutation.mutateAsync({ fileUploadIds: [id] });
+ }, [id, hideFileUploadsMutation, utils]);
+
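+ // Hide this upload and refresh the dataset entries list to pick up the newly imported rows.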
+ const [refreshDatasetEntries] = useHandledAsyncCallback(async () => {
+ await hideFileUploadsMutation.mutateAsync({ fileUploadIds: [id] });
+ await utils.datasetEntries.list.invalidate();
+ }, [id, hideFileUploadsMutation, utils]);
+
+ return (
+   <VStack w="full" alignItems="flex-start" p={4} spacing={1} borderTopWidth={1}>
+     <HStack w="full" justifyContent="space-between">
+       <HStack spacing={1} minW={0}>
+         <Text fontWeight="bold" noOfLines={1}>
+           {fileName}
+         </Text>
+         <Text color="gray.500">({formatFileSize(fileSize, 2)})</Text>
+       </HStack>
+       {status === "COMPLETE" ? (
+         <IconButton
+           aria-label="Dismiss upload and refresh entries"
+           icon={<BsX />}
+           size="xs"
+           variant="ghost"
+           onClick={refreshDatasetEntries}
+         />
+       ) : (
+         <IconButton
+           aria-label="Dismiss upload"
+           icon={<BsX />}
+           size="xs"
+           variant="ghost"
+           isLoading={hidingInProgress}
+           onClick={hideFileUpload}
+         />
+       )}
+     </HStack>
+     {errorMessage ? (
+       <Text color="red.500">{errorMessage}</Text>
+     ) : (
+       <>
+         <Text fontSize="xs">{status}</Text>
+         <Progress w="full" value={progress} borderRadius={2} />
+       </>
+     )}
+   </VStack>
+ );
+};
diff --git a/app/src/components/datasets/ImportDataButton.tsx b/app/src/components/datasets/UploadDataButton.tsx
similarity index 97%
rename from app/src/components/datasets/ImportDataButton.tsx
rename to app/src/components/datasets/UploadDataButton.tsx
index ea6d82c..9e86d93 100644
--- a/app/src/components/datasets/ImportDataButton.tsx
+++ b/app/src/components/datasets/UploadDataButton.tsx
@@ -26,26 +26,26 @@ import { validateTrainingRows, type TrainingRow, parseJSONL } from "./validateTr
import { uploadDatasetEntryFile } from "~/utils/azure/website";
import { formatFileSize } from "~/utils/utils";
-const ImportDataButton = () => {
+const UploadDataButton = () => {
const disclosure = useDisclosure();
return (
<>
-      <ImportDataModal disclosure={disclosure} />
+      <UploadDataModal disclosure={disclosure} />
     </>
);
};
-export default ImportDataButton;
+export default UploadDataButton;
-const ImportDataModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
+const UploadDataModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
const dataset = useDataset().data;
const [validationError, setValidationError] = useState(null);
diff --git a/app/src/pages/datasets/[id].tsx b/app/src/pages/datasets/[id].tsx
index 33d34cf..d00e891 100644
--- a/app/src/pages/datasets/[id].tsx
+++ b/app/src/pages/datasets/[id].tsx
@@ -25,10 +25,10 @@ import DatasetEntryPaginator from "~/components/datasets/DatasetEntryPaginator";
import { useAppStore } from "~/state/store";
import FineTuneButton from "~/components/datasets/FineTuneButton";
import ExperimentButton from "~/components/datasets/ExperimentButton";
-import ImportDataButton from "~/components/datasets/ImportDataButton";
+import UploadDataButton from "~/components/datasets/UploadDataButton";
// import DownloadButton from "~/components/datasets/DownloadButton";
import DeleteButton from "~/components/datasets/DeleteButton";
-import FileUploadCard from "~/components/datasets/FileUploadCard";
+import FileUploadsCard from "~/components/datasets/FileUploadsCard";
export default function Dataset() {
const utils = api.useContext();
@@ -41,19 +41,6 @@ export default function Dataset() {
setName(dataset.data?.name || "");
}, [dataset.data?.name]);
- const [fileUploadsRefetchInterval, setFileUploadsRefetchInterval] = useState(500);
- const fileUploads = api.datasets.listFileUploads.useQuery(
- { datasetId: dataset.data?.id as string },
- { enabled: !!dataset.data?.id, refetchInterval: fileUploadsRefetchInterval },
- );
- useEffect(() => {
- if (fileUploads?.data?.some((fu) => fu.status !== "COMPLETE" && fu.status !== "ERROR")) {
- setFileUploadsRefetchInterval(500);
- } else {
- setFileUploadsRefetchInterval(0);
- }
- }, [fileUploads]);
-
useEffect(() => {
useAppStore.getState().sharedArgumentsEditor.loadMonaco().catch(console.error);
}, []);
@@ -115,16 +102,9 @@ export default function Dataset() {
-          <VStack w="full" alignItems="flex-start" spacing={4}>
-            <VStack w="full" alignItems="flex-start">
-              {fileUploads?.data?.map((upload) => (
-                <FileUploadCard key={upload.id} fileUpload={upload} />
-              ))}
-            </VStack>
-          </VStack>
-          <ImportDataButton />
+          <UploadDataButton />
           {/* <DownloadButton /> */}
@@ -133,6 +113,7 @@ export default function Dataset() {
+      <FileUploadsCard />
     </>
diff --git a/app/src/server/api/routers/datasets.router.ts b/app/src/server/api/routers/datasets.router.ts
index af86a8e..9092665 100644
--- a/app/src/server/api/routers/datasets.router.ts
+++ b/app/src/server/api/routers/datasets.router.ts
@@ -3,7 +3,7 @@ import { z } from "zod";
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { prisma } from "~/server/db";
import { requireCanModifyProject, requireCanViewProject } from "~/utils/accessControl";
-import { success } from "~/utils/errorHandling/standardResponses";
+import { error, success } from "~/utils/errorHandling/standardResponses";
import { generateServiceClientUrl } from "~/utils/azure/server";
import { queueImportDatasetEntries } from "~/server/tasks/importDatasetEntries.task";
@@ -148,19 +148,33 @@ export const datasetsRouter = createTRPCRouter({
orderBy: { createdAt: "desc" },
});
}),
- hideFileUpload: protectedProcedure
- .input(z.object({ fileUploadId: z.string() }))
+ hideFileUploads: protectedProcedure
+ .input(z.object({ fileUploadIds: z.string().array() }))
.mutation(async ({ input, ctx }) => {
- const { datasetId } = await prisma.datasetFileUpload.findUniqueOrThrow({
- where: { id: input.fileUploadId },
- });
- const { projectId } = await prisma.dataset.findUniqueOrThrow({
- where: { id: datasetId },
+ if (!input.fileUploadIds.length) return error("No file upload ids provided");
+
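+      // Resolve the dataset (and its project) from the first upload; the access check and the updateMany filter below both scope to it.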
+ const {
+ dataset: { projectId, id: datasetId },
+ } = await prisma.datasetFileUpload.findUniqueOrThrow({
+ where: { id: input.fileUploadIds[0] },
+ select: {
+ dataset: {
+ select: {
+ id: true,
+ projectId: true,
+ },
+ },
+ },
});
await requireCanModifyProject(projectId, ctx);
- await prisma.datasetFileUpload.update({
- where: { id: input.fileUploadId },
+ await prisma.datasetFileUpload.updateMany({
+ where: {
+ id: {
+ in: input.fileUploadIds,
+ },
+ datasetId,
+ },
data: {
visible: false,
},
diff --git a/app/src/server/tasks/importDatasetEntries.task.ts b/app/src/server/tasks/importDatasetEntries.task.ts
index 0e3f231..3e3547c 100644
--- a/app/src/server/tasks/importDatasetEntries.task.ts
+++ b/app/src/server/tasks/importDatasetEntries.task.ts
@@ -104,6 +104,7 @@ export const importDatasetEntries = defineTask(
data: {
errorMessage: `Error formatting rows: ${e.message as string}`,
status: "ERROR",
+ visible: true,
},
});
return;
@@ -128,6 +129,7 @@ export const importDatasetEntries = defineTask(
data: {
status: "COMPLETE",
progress: 100,
+ visible: true,
},
});
},