Debounce filter value updates

David Corbitt
2023-08-14 21:00:42 -07:00
parent 05e774d021
commit 649dc3376b
7 changed files with 87 additions and 37 deletions

View File

@@ -0,0 +1,19 @@
-- DropIndex
DROP INDEX "LoggedCallTag_name_idx";
DROP INDEX "LoggedCallTag_name_value_idx";
-- AlterTable: Add projectId column without NOT NULL constraint for now
ALTER TABLE "LoggedCallTag" ADD COLUMN "projectId" UUID;
-- Set the default value
UPDATE "LoggedCallTag" lct
SET "projectId" = lc."projectId"
FROM "LoggedCall" lc
WHERE lct."loggedCallId" = lc.id;
-- Now set the NOT NULL constraint
ALTER TABLE "LoggedCallTag" ALTER COLUMN "projectId" SET NOT NULL;
-- CreateIndex
CREATE INDEX "LoggedCallTag_projectId_name_idx" ON "LoggedCallTag"("projectId", "name");
CREATE INDEX "LoggedCallTag_projectId_name_value_idx" ON "LoggedCallTag"("projectId", "name", "value");

View File

@@ -326,12 +326,13 @@ model LoggedCallTag {
id String @id @default(uuid()) @db.Uuid
name String
value String?
+ projectId String @db.Uuid
loggedCallId String @db.Uuid
loggedCall LoggedCall @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)
- @@index([name])
- @@index([name, value])
+ @@index([projectId, name])
+ @@index([projectId, name, value])
}
model ApiKey {
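
In the Prisma schema the per-name indexes are replaced by composite indexes that lead with projectId, so tag lookups are always scoped to a project. A sketch of the query shape the new (projectId, name, value) index is meant to serve; the prisma client instance and the projectId variable are assumed, not shown in this diff:

    // Distinct values recorded for one tag name inside a single project.
    const modelValues = await prisma.loggedCallTag.findMany({
      where: { projectId, name: "$model" },
      distinct: ["value"],
      select: { value: true },
    });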

View File

@@ -389,6 +389,7 @@ for (let i = 0; i < 1437; i++) {
},
});
loggedCallTagsToCreate.push({
+ projectId: project.id,
loggedCallId,
name: "$model",
value: template.reqPayload.model,

View File

@@ -1,9 +1,11 @@
import { useCallback, useState } from "react";
import { HStack, IconButton, Input, Select } from "@chakra-ui/react";
import { BsTrash } from "react-icons/bs";
import { type LogFilter, comparators } from "~/state/logFiltersSlice";
import { useAppStore } from "~/state/store";
import { useFilterableFields } from "~/utils/hooks";
+ import { debounce } from "lodash-es";
const LogFilter = ({ filter, index }: { filter: LogFilter; index: number }) => {
const filterableFields = useFilterableFields();
@@ -13,6 +15,20 @@ const LogFilter = ({ filter, index }: { filter: LogFilter; index: number }) => {
const { field, comparator, value } = filter;
+ const [editedValue, setEditedValue] = useState("");
+ const debouncedUpdateFilter = useCallback(
+   debounce(
+     (index: number, filter: LogFilter) => {
+       console.log("updating filter!!!");
+       updateFilter(index, filter);
+     },
+     200,
+     { leading: true },
+   ),
+   [updateFilter],
+ );
return (
<HStack>
<Select
@@ -41,8 +57,11 @@ const LogFilter = ({ filter, index }: { filter: LogFilter; index: number }) => {
))}
</Select>
<Input
- value={value}
- onChange={(e) => updateFilter(index, { ...filter, value: e.target.value })}
+ value={editedValue}
+ onChange={(e) => {
+   setEditedValue(e.target.value);
+   debouncedUpdateFilter(index, { ...filter, value: e.target.value });
+ }}
/>
<IconButton
aria-label="Delete Filter"

View File

@@ -1,11 +1,20 @@
import { Card, Table, Tbody } from "@chakra-ui/react";
- import { useState } from "react";
+ import { useState, useEffect } from "react";
import { useLoggedCalls } from "~/utils/hooks";
import { TableHeader, TableRow } from "./TableRow";
export default function LoggedCallsTable() {
const [expandedRow, setExpandedRow] = useState<string | null>(null);
- const { data: loggedCalls } = useLoggedCalls();
+ const { data, isLoading } = useLoggedCalls();
+ const [loggedCalls, setLoggedCalls] = useState(data);
+ useEffect(() => {
+   // persist data while loading
+   if (!isLoading) {
+     setLoggedCalls(data);
+   }
+ }, [data, isLoading]);
return (
<Card width="100%" overflow="hidden">
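
LoggedCallsTable now keeps showing the last loaded rows while a refetch triggered by a filter change is in flight, instead of flashing an empty table. The same idea as a small generic hook, purely illustrative (the hook name and its use are assumptions, not part of this commit):

    import { useEffect, useState } from "react";

    // Return the most recent value that finished loading; hold it while a new load is pending.
    function useLastLoaded<T>(data: T, isLoading: boolean): T {
      const [stable, setStable] = useState(data);
      useEffect(() => {
        if (!isLoading) setStable(data);
      }, [data, isLoading]);
      return stable;
    }

    // e.g. const loggedCalls = useLastLoaded(data, isLoading);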

View File

@@ -186,6 +186,7 @@ export const externalApiRouter = createTRPCRouter({
]);
const tagsToCreate = Object.entries(input.tags ?? {}).map(([name, value]) => ({
+ projectId: key.projectId,
loggedCallId: newLoggedCallId,
// sanitize tags
name: name.replaceAll(/[^a-zA-Z0-9_]/g, "_"),
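
Tags reported through the external API are now stamped with the key's projectId, and their names are sanitized with the replaceAll call shown above. For illustration, the same expression pulled out into a helper (the helper name is hypothetical):

    const sanitizeTagName = (name: string) => name.replaceAll(/[^a-zA-Z0-9_]/g, "_");

    sanitizeTagName("prompt-id");  // "prompt_id"
    sanitizeTagName("user.email"); // "user_email"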

View File

@@ -15,7 +15,8 @@ export default class OpenAI extends openai.OpenAI {
constructor({
openPipeApiKey = readEnv("OPENPIPE_API_KEY"),
- openPipeBaseUrl = readEnv("OPENPIPE_BASE_URL") ?? `https://app.openpipe.ai/v1`,
+ openPipeBaseUrl = readEnv("OPENPIPE_BASE_URL") ??
+   `https://app.openpipe.ai/v1`,
...opts
}: ClientOptions = {}) {
super({ ...opts });
@@ -73,37 +74,36 @@ class ExtendedCompletions extends openai.OpenAI.Chat.Completions {
options?: RequestOptions,
tags?: Record<string, string>
): Promise<any> {
- // // Your pre API call logic here
- // console.log("Doing pre API call...");
+ // Your pre API call logic here
+ console.log("Doing pre API call...");
- // // Determine the type of request
- // if (params.hasOwnProperty("stream") && params.stream === true) {
- // const result = await super.create(
- // params as CompletionCreateParams.CreateChatCompletionRequestStreaming,
- // options
- // );
- // // Your post API call logic here
- // console.log("Doing post API call for Streaming...");
- // return result;
- // } else {
- // const requestedAt = Date.now();
- const result = await super.create(
-   params as CompletionCreateParams.CreateChatCompletionRequestNonStreaming,
-   options
- );
- return result;
- // await this.openaiInstance.openPipeApi?.externalApiReport({
- // requestedAt,
- // receivedAt: Date.now(),
- // reqPayload: params,
- // respPayload: result,
- // statusCode: 200,
- // errorMessage: undefined,
- // tags,
- // });
+ // Determine the type of request
+ if (params.hasOwnProperty("stream") && params.stream === true) {
+   const result = await super.create(
+     params as CompletionCreateParams.CreateChatCompletionRequestStreaming,
+     options
+   );
+   // Your post API call logic here
+   console.log("Doing post API call for Streaming...");
+   return result;
+ } else {
+   const requestedAt = Date.now();
+   const result = await super.create(
+     params as CompletionCreateParams.CreateChatCompletionRequestNonStreaming,
+     options
+   );
+   await this.openaiInstance.openPipeApi?.externalApiReport({
+     requestedAt,
+     receivedAt: Date.now(),
+     reqPayload: params,
+     respPayload: result,
+     statusCode: 200,
+     errorMessage: undefined,
+     tags,
+   });
- // console.log("GOT RESULT", result);
- // return result;
- // }
+   console.log("GOT RESULT", result);
+   return result;
+ }
}
}
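
With the block uncommented, non-streaming completions are timed and reported back via externalApiReport, while streaming requests are passed through after a log line. A usage sketch based only on the signatures visible in this diff; the import path and the chat.completions wiring onto ExtendedCompletions are assumptions, since that plumbing is not part of the hunk:

    import OpenAI from "openpipe/openai"; // hypothetical import path

    const client = new OpenAI({
      openPipeApiKey: process.env.OPENPIPE_API_KEY,
      // openPipeBaseUrl defaults to https://app.openpipe.ai/v1 (see the constructor above)
    });

    const completion = await client.chat.completions.create(
      { model: "gpt-3.5-turbo", messages: [{ role: "user", content: "Hello" }] },
      undefined,                // RequestOptions
      { promptId: "greeting" }, // tags forwarded to externalApiReport on the non-streaming path
    );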