From 649dc3376bf7cee29ac0ee3c15a7948c61e35879 Mon Sep 17 00:00:00 2001
From: David Corbitt
Date: Mon, 14 Aug 2023 21:00:42 -0700
Subject: [PATCH] Debounce filter value updates

---
 .../migration.sql                             | 19 ++++
 app/prisma/schema.prisma                      |  5 +-
 app/prisma/seedDashboard.ts                   |  1 +
 .../requestLogs/LogFilters/LogFilter.tsx      | 23 ++++++-
 .../requestLogs/LoggedCallsTable.tsx          | 13 +++-
 .../server/api/routers/externalApi.router.ts  |  1 +
 client-libs/typescript/src/openai/index.ts    | 62 +++++++++----------
 7 files changed, 87 insertions(+), 37 deletions(-)
 create mode 100644 app/prisma/migrations/20230814233722_index_tags_by_project/migration.sql

diff --git a/app/prisma/migrations/20230814233722_index_tags_by_project/migration.sql b/app/prisma/migrations/20230814233722_index_tags_by_project/migration.sql
new file mode 100644
index 0000000..f51e691
--- /dev/null
+++ b/app/prisma/migrations/20230814233722_index_tags_by_project/migration.sql
@@ -0,0 +1,19 @@
+-- DropIndex
+DROP INDEX "LoggedCallTag_name_idx";
+DROP INDEX "LoggedCallTag_name_value_idx";
+
+-- AlterTable: Add projectId column without NOT NULL constraint for now
+ALTER TABLE "LoggedCallTag" ADD COLUMN "projectId" UUID;
+
+-- Set the default value
+UPDATE "LoggedCallTag" lct
+SET "projectId" = lc."projectId"
+FROM "LoggedCall" lc
+WHERE lct."loggedCallId" = lc.id;
+
+-- Now set the NOT NULL constraint
+ALTER TABLE "LoggedCallTag" ALTER COLUMN "projectId" SET NOT NULL;
+
+-- CreateIndex
+CREATE INDEX "LoggedCallTag_projectId_name_idx" ON "LoggedCallTag"("projectId", "name");
+CREATE INDEX "LoggedCallTag_projectId_name_value_idx" ON "LoggedCallTag"("projectId", "name", "value");
diff --git a/app/prisma/schema.prisma b/app/prisma/schema.prisma
index dfc39a0..5b557d9 100644
--- a/app/prisma/schema.prisma
+++ b/app/prisma/schema.prisma
@@ -326,12 +326,13 @@ model LoggedCallTag {
   id    String  @id @default(uuid()) @db.Uuid
   name  String
   value String?
+  projectId String @db.Uuid
 
   loggedCallId String     @db.Uuid
   loggedCall   LoggedCall @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)
 
-  @@index([name])
-  @@index([name, value])
+  @@index([projectId, name])
+  @@index([projectId, name, value])
 }
 
 model ApiKey {
diff --git a/app/prisma/seedDashboard.ts b/app/prisma/seedDashboard.ts
index a12ad6a..ddeb5bc 100644
--- a/app/prisma/seedDashboard.ts
+++ b/app/prisma/seedDashboard.ts
@@ -389,6 +389,7 @@ for (let i = 0; i < 1437; i++) {
     },
   });
   loggedCallTagsToCreate.push({
+    projectId: project.id,
     loggedCallId,
     name: "$model",
     value: template.reqPayload.model,
diff --git a/app/src/components/requestLogs/LogFilters/LogFilter.tsx b/app/src/components/requestLogs/LogFilters/LogFilter.tsx
index 33f8c48..5deafb9 100644
--- a/app/src/components/requestLogs/LogFilters/LogFilter.tsx
+++ b/app/src/components/requestLogs/LogFilters/LogFilter.tsx
@@ -1,9 +1,11 @@
+import { useCallback, useState } from "react";
 import { HStack, IconButton, Input, Select } from "@chakra-ui/react";
 import { BsTrash } from "react-icons/bs";
 
 import { type LogFilter, comparators } from "~/state/logFiltersSlice";
 import { useAppStore } from "~/state/store";
 import { useFilterableFields } from "~/utils/hooks";
+import { debounce } from "lodash-es";
 
 const LogFilter = ({ filter, index }: { filter: LogFilter; index: number }) => {
   const filterableFields = useFilterableFields();
@@ -13,6 +15,20 @@ const LogFilter = ({ filter, index }: { filter: LogFilter; index: number }) => {
 
   const { field, comparator, value } = filter;
 
+  const [editedValue, setEditedValue] = useState("");
+
+  const debouncedUpdateFilter = useCallback(
+    debounce(
+      (index: number, filter: LogFilter) => {
+        console.log("updating filter!!!");
+        updateFilter(index, filter);
+      },
+      200,
+      { leading: true },
+    ),
+    [updateFilter],
+  );
+
   return (
@@ ... @@
       <Input
-        value={value}
-        onChange={(e) => updateFilter(index, { ...filter, value: e.target.value })}
+        value={editedValue}
+        onChange={(e) => {
+          setEditedValue(e.target.value);
+          debouncedUpdateFilter(index, { ...filter, value: e.target.value });
+        }}
       />
diff --git a/app/src/components/requestLogs/LoggedCallsTable.tsx b/app/src/components/requestLogs/LoggedCallsTable.tsx
@@ ... @@
 (null);
 
-  const { data: loggedCalls } = useLoggedCalls();
+  const { data, isLoading } = useLoggedCalls();
+
+  const [loggedCalls, setLoggedCalls] = useState(data);
+
+  useEffect(() => {
+    // persist data while loading
+    if (!isLoading) {
+      setLoggedCalls(data);
+    }
+  }, [data, isLoading]);
 
   return (
diff --git a/app/src/server/api/routers/externalApi.router.ts b/app/src/server/api/routers/externalApi.router.ts
index e8c3231..0195864 100644
--- a/app/src/server/api/routers/externalApi.router.ts
+++ b/app/src/server/api/routers/externalApi.router.ts
@@ -186,6 +186,7 @@ export const externalApiRouter = createTRPCRouter({
       ]);
 
       const tagsToCreate = Object.entries(input.tags ?? {}).map(([name, value]) => ({
+        projectId: key.projectId,
         loggedCallId: newLoggedCallId,
         // sanitize tags
         name: name.replaceAll(/[^a-zA-Z0-9_]/g, "_"),
diff --git a/client-libs/typescript/src/openai/index.ts b/client-libs/typescript/src/openai/index.ts
index ca1e231..7d5bd64 100644
--- a/client-libs/typescript/src/openai/index.ts
+++ b/client-libs/typescript/src/openai/index.ts
@@ -15,7 +15,8 @@ export default class OpenAI extends openai.OpenAI {
 
   constructor({
     openPipeApiKey = readEnv("OPENPIPE_API_KEY"),
-    openPipeBaseUrl = readEnv("OPENPIPE_BASE_URL") ?? `https://app.openpipe.ai/v1`,
+    openPipeBaseUrl = readEnv("OPENPIPE_BASE_URL") ??
+      `https://app.openpipe.ai/v1`,
     ...opts
   }: ClientOptions = {}) {
     super({ ...opts });
@@ -73,37 +74,36 @@ class ExtendedCompletions extends openai.OpenAI.Chat.Completions {
     options?: RequestOptions,
     tags?: Record
   ): Promise {
-    // // Your pre API call logic here
-    // console.log("Doing pre API call...");
+    // Your pre API call logic here
+    console.log("Doing pre API call...");
 
-    // // Determine the type of request
-    // if (params.hasOwnProperty("stream") && params.stream === true) {
-    //   const result = await super.create(
-    //     params as CompletionCreateParams.CreateChatCompletionRequestStreaming,
-    //     options
-    //   );
-    //   // Your post API call logic here
-    //   console.log("Doing post API call for Streaming...");
-    //   return result;
-    // } else {
-    //   const requestedAt = Date.now();
-    const result = await super.create(
-      params as CompletionCreateParams.CreateChatCompletionRequestNonStreaming,
-      options
-    );
-    return result;
-    // await this.openaiInstance.openPipeApi?.externalApiReport({
-    //   requestedAt,
-    //   receivedAt: Date.now(),
-    //   reqPayload: params,
-    //   respPayload: result,
-    //   statusCode: 200,
-    //   errorMessage: undefined,
-    //   tags,
-    // });
+    // Determine the type of request
+    if (params.hasOwnProperty("stream") && params.stream === true) {
+      const result = await super.create(
+        params as CompletionCreateParams.CreateChatCompletionRequestStreaming,
+        options
+      );
+      // Your post API call logic here
+      console.log("Doing post API call for Streaming...");
+      return result;
+    } else {
+      const requestedAt = Date.now();
+      const result = await super.create(
+        params as CompletionCreateParams.CreateChatCompletionRequestNonStreaming,
+        options
+      );
+      await this.openaiInstance.openPipeApi?.externalApiReport({
+        requestedAt,
+        receivedAt: Date.now(),
+        reqPayload: params,
+        respPayload: result,
+        statusCode: 200,
+        errorMessage: undefined,
+        tags,
+      });
 
-    // console.log("GOT RESULT", result);
-    // return result;
-    // }
+      console.log("GOT RESULT", result);
+      return result;
+    }
   }
 }