Compare commits

..

16 Commits

Author SHA1 Message Date
Kyle Corbitt
10dd53e7f6 Run workers in a separate Docker container
We've outgrown the run-everything-on-one-machine setup. This change moves background jobs to a different Docker image in production. It also adds a `jobKey` to certain jobs so if we try to process the same cell multiple times it'll only actually run the job once.
2023-08-18 11:16:00 -07:00
Kyle Corbitt
b1802fc04b Merge pull request #176 from OpenPipe/more-js
Streaming + logging works in Typescript SDK
2023-08-18 08:56:56 -07:00
Kyle Corbitt
f2135ddc72 Streaming + logging works in Typescript SDK
Also added some high-level tests to minimize the chances that we're breaking anything.

The typescript SDK is mostly functional at this point, with the exception that we don't have a build process or way to import it when deployed as an NPM package.
2023-08-18 08:53:08 -07:00
arcticfly
ca89eafb0b Create new uiId for forked variants and scenarios (#175)
* Create new uiIds for forked variants and scenarios

* Add replaceVariant.mutateAsync to onSave dependencies
2023-08-18 08:09:07 -07:00
arcticfly
b50d47beaf Square header border when scrolled down (#174)
* Square header border when scrolled down

* Remove unused import
2023-08-18 01:41:47 -07:00
arcticfly
733d53625b Add Gryphe/MythoMax-L2-13b (#173) 2023-08-18 00:37:16 -07:00
arcticfly
a5e59e4235 Allow user to delete scenario without variables (#172)
* Allow user to delete scenario without variables

* Hide expand button for empty scenario editor

* Add header to scenario modal
2023-08-18 00:08:32 -07:00
Kyle Corbitt
d0102e3202 Merge pull request #171 from OpenPipe/experiment-slug
Use shorter experiment IDs
2023-08-17 23:33:30 -07:00
Kyle Corbitt
bd571c4c4e Merge pull request #170 from OpenPipe/jobs-log
Enqueue tasks more efficiently
2023-08-17 23:33:20 -07:00
Kyle Corbitt
296eb23d97 Use shorter experiment IDs
Because https://app.openpipe.ai/experiments/B1EtN6oHeXMele2 is a cooler URL than https://app.openpipe.ai/experiments/3692942c-6f1b-4bef-83b1-c11f00a3fbdd
2023-08-17 23:28:56 -07:00
Kyle Corbitt
4e2ae7a441 Enqueue tasks more efficiently
Previously we were opening a new database connection for each task we added. Not a problem at small scale but kinda overwhelming for Postgres now that we have more usage.
2023-08-17 22:42:46 -07:00
Kyle Corbitt
072dcee376 Merge pull request #168 from OpenPipe/jobs-log
Admin dashboard for jobs
2023-08-17 22:26:10 -07:00
Kyle Corbitt
94464c0617 Admin dashboard for jobs
Extremely simple jobs dashboard to sanity-check what we've got going on in the job queue.
2023-08-17 22:20:39 -07:00
arcticfly
980644f13c Support vicuna system message (#167)
* Support vicuna system message

* Change tags to USER and ASSISTANT
2023-08-17 21:02:27 -07:00
arcticfly
6a56250001 Add platypus 13b, vicuna 13b, and nous hermes 7b (#166)
* Add platypus

* Add vicuna 13b and nous hermes 7b
2023-08-17 20:01:10 -07:00
Kyle Corbitt
b1c7bbbd4a Merge pull request #165 from OpenPipe/better-output
Don't define CellWrapper inline
2023-08-17 19:07:32 -07:00
57 changed files with 1396 additions and 619 deletions

View File

@@ -12,6 +12,7 @@ declare module "nextjs-routes" {
export type Route = export type Route =
| StaticRoute<"/account/signin"> | StaticRoute<"/account/signin">
| StaticRoute<"/admin/jobs">
| DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }> | DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
| StaticRoute<"/api/experiments/og-image"> | StaticRoute<"/api/experiments/og-image">
| DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }> | DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>
@@ -20,7 +21,7 @@ declare module "nextjs-routes" {
| StaticRoute<"/dashboard"> | StaticRoute<"/dashboard">
| DynamicRoute<"/data/[id]", { "id": string }> | DynamicRoute<"/data/[id]", { "id": string }>
| StaticRoute<"/data"> | StaticRoute<"/data">
| DynamicRoute<"/experiments/[id]", { "id": string }> | DynamicRoute<"/experiments/[experimentSlug]", { "experimentSlug": string }>
| StaticRoute<"/experiments"> | StaticRoute<"/experiments">
| StaticRoute<"/"> | StaticRoute<"/">
| DynamicRoute<"/invitations/[invitationToken]", { "invitationToken": string }> | DynamicRoute<"/invitations/[invitationToken]", { "invitationToken": string }>

View File

@@ -18,6 +18,7 @@
"lint": "next lint", "lint": "next lint",
"start": "TZ=UTC next start", "start": "TZ=UTC next start",
"codegen:clients": "tsx src/server/scripts/client-codegen.ts", "codegen:clients": "tsx src/server/scripts/client-codegen.ts",
"codegen:db": "prisma generate && kysely-codegen --dialect postgres --out-file src/server/db.types.ts",
"seed": "tsx prisma/seed.ts", "seed": "tsx prisma/seed.ts",
"check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'", "check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'",
"test": "pnpm vitest" "test": "pnpm vitest"
@@ -65,6 +66,7 @@
"json-stringify-pretty-compact": "^4.0.0", "json-stringify-pretty-compact": "^4.0.0",
"jsonschema": "^1.4.1", "jsonschema": "^1.4.1",
"kysely": "^0.26.1", "kysely": "^0.26.1",
"kysely-codegen": "^0.10.1",
"lodash-es": "^4.17.21", "lodash-es": "^4.17.21",
"lucide-react": "^0.265.0", "lucide-react": "^0.265.0",
"marked": "^7.0.3", "marked": "^7.0.3",

View File

@@ -0,0 +1,88 @@
/*
 * Copyright 2023 Viascom Ltd liab. Co
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
-- gen_random_bytes() (used below) lives in the pgcrypto extension.
CREATE EXTENSION IF NOT EXISTS pgcrypto;
-- nanoid(): generate a random, URL-friendly id of `size` characters drawn
-- uniformly from `alphabet`, using rejection sampling over random bytes so
-- every character is equally likely (no modulo bias).
CREATE OR REPLACE FUNCTION nanoid(
size int DEFAULT 21,
alphabet text DEFAULT '_-0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
)
RETURNS text
LANGUAGE plpgsql
volatile
AS
$$
DECLARE
idBuilder text := '';
counter int := 0;
bytes bytea;
alphabetIndex int;
alphabetArray text[];
alphabetLength int;
mask int;
step int;
BEGIN
-- Split the alphabet into one array element per character (1-based).
alphabetArray := regexp_split_to_array(alphabet, '');
alphabetLength := array_length(alphabetArray, 1);
-- mask: smallest all-ones bit mask that covers alphabetLength - 1, i.e.
-- 2^ceil(log2(alphabetLength)) - 1. Masked bytes land in [0, mask].
mask := (2 << cast(floor(log(alphabetLength - 1) / log(2)) as int)) - 1;
-- step: how many random bytes to draw per batch; 1.6 oversamples to make
-- it likely one batch yields `size` accepted characters.
step := cast(ceil(1.6 * mask * size / alphabetLength) AS int);
while true
loop
bytes := gen_random_bytes(step);
while counter < step
loop
-- Map the masked byte to a 1-based array index; values beyond the
-- alphabet length are rejected (that is the rejection sampling).
alphabetIndex := (get_byte(bytes, counter) & mask) + 1;
if alphabetIndex <= alphabetLength then
idBuilder := idBuilder || alphabetArray[alphabetIndex];
if length(idBuilder) = size then
return idBuilder;
end if;
end if;
counter := counter + 1;
end loop;
-- Batch exhausted before reaching `size`: draw a fresh batch.
counter := 0;
end loop;
END
$$;
-- short_nanoid(): 15-character id over an alphanumeric-only alphabet
-- (the default alphabet minus '_' and '-'), used for experiment slugs.
CREATE OR REPLACE FUNCTION short_nanoid()
RETURNS text
LANGUAGE plpgsql
volatile
AS
$$
BEGIN
RETURN nanoid(15, '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ');
END
$$;
-- AlterTable
ALTER TABLE "Experiment" ADD COLUMN "slug" TEXT NOT NULL DEFAULT short_nanoid();
-- For existing experiments, keep the existing id as the slug for backwards compatibility
UPDATE "Experiment" SET "slug" = "id";
-- CreateIndex
CREATE UNIQUE INDEX "Experiment_slug_key" ON "Experiment"("slug");

View File

@@ -12,6 +12,8 @@ datasource db {
model Experiment { model Experiment {
id String @id @default(uuid()) @db.Uuid id String @id @default(uuid()) @db.Uuid
slug String @unique @default(dbgenerated("short_nanoid()"))
label String label String
sortIndex Int @default(0) sortIndex Int @default(0)
@@ -412,10 +414,10 @@ model UserInvitation {
invitationToken String @unique invitationToken String @unique
senderId String @db.Uuid senderId String @db.Uuid
sender User @relation(fields: [senderId], references: [id], onDelete: Cascade) sender User @relation(fields: [senderId], references: [id], onDelete: Cascade)
@@unique([projectId, email])
createdAt DateTime @default(now()) createdAt DateTime @default(now())
updatedAt DateTime @updatedAt updatedAt DateTime @updatedAt
@@unique([projectId, email])
} }
model VerificationToken { model VerificationToken {

View File

@@ -10,6 +10,14 @@ await prisma.project.deleteMany({
where: { id: defaultId }, where: { id: defaultId },
}); });
// Mark all users as admins
await prisma.user.updateMany({
where: {},
data: {
role: "ADMIN",
},
});
// If there's an existing project, just seed into it // If there's an existing project, just seed into it
const project = const project =
(await prisma.project.findFirst({})) ?? (await prisma.project.findFirst({})) ??
@@ -18,12 +26,16 @@ const project =
})); }));
if (env.OPENPIPE_API_KEY) { if (env.OPENPIPE_API_KEY) {
await prisma.apiKey.create({ await prisma.apiKey.upsert({
data: { where: {
apiKey: env.OPENPIPE_API_KEY,
},
create: {
projectId: project.id, projectId: project.id,
name: "Default API Key", name: "Default API Key",
apiKey: env.OPENPIPE_API_KEY, apiKey: env.OPENPIPE_API_KEY,
}, },
update: {},
}); });
} }

View File

@@ -10,6 +10,4 @@ pnpm tsx src/promptConstructor/migrate.ts
echo "Starting the server" echo "Starting the server"
pnpm concurrently --kill-others \ pnpm start
"pnpm start" \
"pnpm tsx src/server/tasks/worker.ts"

View File

@@ -8,7 +8,7 @@ import {
useHandledAsyncCallback, useHandledAsyncCallback,
useVisibleScenarioIds, useVisibleScenarioIds,
} from "~/utils/hooks"; } from "~/utils/hooks";
import { cellPadding } from "../constants"; import { cellPadding } from "./constants";
import { ActionButton } from "./ScenariosHeader"; import { ActionButton } from "./ScenariosHeader";
export default function AddVariantButton() { export default function AddVariantButton() {

View File

@@ -16,7 +16,7 @@ import {
VStack, VStack,
} from "@chakra-ui/react"; } from "@chakra-ui/react";
import { BsArrowsAngleExpand, BsX } from "react-icons/bs"; import { BsArrowsAngleExpand, BsX } from "react-icons/bs";
import { cellPadding } from "../constants"; import { cellPadding } from "./constants";
import { FloatingLabelInput } from "./FloatingLabelInput"; import { FloatingLabelInput } from "./FloatingLabelInput";
import { ScenarioEditorModal } from "./ScenarioEditorModal"; import { ScenarioEditorModal } from "./ScenarioEditorModal";
@@ -111,14 +111,11 @@ export default function ScenarioEditor({
onDrop={onReorder} onDrop={onReorder}
backgroundColor={isDragTarget ? "gray.100" : "transparent"} backgroundColor={isDragTarget ? "gray.100" : "transparent"}
> >
{variableLabels.length === 0 ? ( {
<Box color="gray.500">
{vars.data ? "No scenario variables configured" : "Loading..."}
</Box>
) : (
<VStack spacing={4} flex={1} py={2}> <VStack spacing={4} flex={1} py={2}>
<HStack justifyContent="space-between" w="100%" align="center" spacing={0}> <HStack justifyContent="space-between" w="100%" align="center" spacing={0}>
<Text flex={1}>Scenario</Text> <Text flex={1}>Scenario</Text>
{variableLabels.length && (
<Tooltip label="Expand" hasArrow> <Tooltip label="Expand" hasArrow>
<IconButton <IconButton
aria-label="Expand" aria-label="Expand"
@@ -130,6 +127,7 @@ export default function ScenarioEditor({
variant="ghost" variant="ghost"
/> />
</Tooltip> </Tooltip>
)}
{canModify && props.canHide && ( {canModify && props.canHide && (
<Tooltip label="Delete" hasArrow> <Tooltip label="Delete" hasArrow>
<IconButton <IconButton
@@ -150,7 +148,13 @@ export default function ScenarioEditor({
</Tooltip> </Tooltip>
)} )}
</HStack> </HStack>
{variableLabels.map((key) => {
{variableLabels.length === 0 ? (
<Box color="gray.500">
{vars.data ? "No scenario variables configured" : "Loading..."}
</Box>
) : (
variableLabels.map((key) => {
const value = values[key] ?? ""; const value = values[key] ?? "";
return ( return (
<FloatingLabelInput <FloatingLabelInput
@@ -174,7 +178,8 @@ export default function ScenarioEditor({
onMouseLeave={() => setVariableInputHovered(false)} onMouseLeave={() => setVariableInputHovered(false)}
/> />
); );
})} })
)}
{hasChanged && ( {hasChanged && (
<HStack justify="right"> <HStack justify="right">
<Button <Button
@@ -192,7 +197,7 @@ export default function ScenarioEditor({
</HStack> </HStack>
)} )}
</VStack> </VStack>
)} }
</HStack> </HStack>
{scenarioEditorModalOpen && ( {scenarioEditorModalOpen && (
<ScenarioEditorModal <ScenarioEditorModal

View File

@@ -65,11 +65,11 @@ export const ScenarioEditorModal = ({
<Modal <Modal
isOpen isOpen
onClose={onClose} onClose={onClose}
size={{ base: "xl", sm: "2xl", md: "3xl", lg: "5xl", xl: "7xl" }} size={{ base: "xl", sm: "2xl", md: "3xl", lg: "4xl", xl: "5xl" }}
> >
<ModalOverlay /> <ModalOverlay />
<ModalContent w={1200}> <ModalContent w={1200}>
<ModalHeader /> <ModalHeader>Edit Scenario</ModalHeader>
<ModalCloseButton /> <ModalCloseButton />
<ModalBody maxW="unset"> <ModalBody maxW="unset">
<VStack spacing={8}> <VStack spacing={8}>

View File

@@ -11,7 +11,7 @@ import {
IconButton, IconButton,
Spinner, Spinner,
} from "@chakra-ui/react"; } from "@chakra-ui/react";
import { cellPadding } from "../constants"; import { cellPadding } from "./constants";
import { import {
useExperiment, useExperiment,
useExperimentAccess, useExperimentAccess,

View File

@@ -110,7 +110,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
setIsChanged(false); setIsChanged(false);
await utils.promptVariants.list.invalidate(); await utils.promptVariants.list.invalidate();
}, [checkForChanges]); }, [checkForChanges, replaceVariant.mutateAsync]);
useEffect(() => { useEffect(() => {
if (monaco) { if (monaco) {

View File

@@ -1,11 +1,11 @@
import { useState, type DragEvent } from "react"; import { useState, type DragEvent } from "react";
import { type PromptVariant } from "../OutputsTable/types"; import { type PromptVariant } from "../types";
import { api } from "~/utils/api"; import { api } from "~/utils/api";
import { RiDraggable } from "react-icons/ri"; import { RiDraggable } from "react-icons/ri";
import { useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks"; import { useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
import { HStack, Icon, Text, GridItem, type GridItemProps } from "@chakra-ui/react"; // Changed here import { HStack, Icon, Text, GridItem, type GridItemProps } from "@chakra-ui/react"; // Changed here
import { cellPadding, headerMinHeight } from "../constants"; import { cellPadding, headerMinHeight } from "../constants";
import AutoResizeTextArea from "../AutoResizeTextArea"; import AutoResizeTextArea from "../../AutoResizeTextArea";
import VariantHeaderMenuButton from "./VariantHeaderMenuButton"; import VariantHeaderMenuButton from "./VariantHeaderMenuButton";
export default function VariantHeader( export default function VariantHeader(
@@ -75,7 +75,7 @@ export default function VariantHeader(
padding={0} padding={0}
sx={{ sx={{
position: "sticky", position: "sticky",
top: "-2", top: "0",
// Ensure that the menu always appears above the sticky header of other variants // Ensure that the menu always appears above the sticky header of other variants
zIndex: menuOpen ? "dropdown" : 10, zIndex: menuOpen ? "dropdown" : 10,
}} }}

View File

@@ -1,6 +1,4 @@
import { type PromptVariant } from "../OutputsTable/types"; import { useState } from "react";
import { api } from "~/utils/api";
import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
import { import {
Icon, Icon,
Menu, Menu,
@@ -14,10 +12,13 @@ import {
} from "@chakra-ui/react"; } from "@chakra-ui/react";
import { BsFillTrashFill, BsGear, BsStars } from "react-icons/bs"; import { BsFillTrashFill, BsGear, BsStars } from "react-icons/bs";
import { FaRegClone } from "react-icons/fa"; import { FaRegClone } from "react-icons/fa";
import { useState } from "react";
import { RefinePromptModal } from "../RefinePromptModal/RefinePromptModal";
import { RiExchangeFundsFill } from "react-icons/ri"; import { RiExchangeFundsFill } from "react-icons/ri";
import { ChangeModelModal } from "../ChangeModelModal/ChangeModelModal";
import { api } from "~/utils/api";
import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
import { type PromptVariant } from "../types";
import { RefinePromptModal } from "../../RefinePromptModal/RefinePromptModal";
import { ChangeModelModal } from "../../ChangeModelModal/ChangeModelModal";
export default function VariantHeaderMenuButton({ export default function VariantHeaderMenuButton({
variant, variant,

View File

@@ -1,6 +1,6 @@
import { HStack, Icon, Text, useToken } from "@chakra-ui/react"; import { HStack, Icon, Text, useToken } from "@chakra-ui/react";
import { type PromptVariant } from "./types"; import { type PromptVariant } from "./types";
import { cellPadding } from "../constants"; import { cellPadding } from "./constants";
import { api } from "~/utils/api"; import { api } from "~/utils/api";
import chroma from "chroma-js"; import chroma from "chroma-js";
import { BsCurrencyDollar } from "react-icons/bs"; import { BsCurrencyDollar } from "react-icons/bs";

View File

@@ -3,13 +3,14 @@ import { api } from "~/utils/api";
import AddVariantButton from "./AddVariantButton"; import AddVariantButton from "./AddVariantButton";
import ScenarioRow from "./ScenarioRow"; import ScenarioRow from "./ScenarioRow";
import VariantEditor from "./VariantEditor"; import VariantEditor from "./VariantEditor";
import VariantHeader from "../VariantHeader/VariantHeader"; import VariantHeader from "./VariantHeader/VariantHeader";
import VariantStats from "./VariantStats"; import VariantStats from "./VariantStats";
import { ScenariosHeader } from "./ScenariosHeader"; import { ScenariosHeader } from "./ScenariosHeader";
import { borders } from "./styles"; import { borders } from "./styles";
import { useScenarios } from "~/utils/hooks"; import { useScenarios } from "~/utils/hooks";
import ScenarioPaginator from "./ScenarioPaginator"; import ScenarioPaginator from "./ScenarioPaginator";
import { Fragment } from "react"; import { Fragment } from "react";
import useScrolledPast from "./useHasScrolledPast";
export default function OutputsTable({ experimentId }: { experimentId: string | undefined }) { export default function OutputsTable({ experimentId }: { experimentId: string | undefined }) {
const variants = api.promptVariants.list.useQuery( const variants = api.promptVariants.list.useQuery(
@@ -18,6 +19,7 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
); );
const scenarios = useScenarios(); const scenarios = useScenarios();
const shouldFlattenHeader = useScrolledPast(50);
if (!variants.data || !scenarios.data) return null; if (!variants.data || !scenarios.data) return null;
@@ -63,8 +65,8 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
variant={variant} variant={variant}
canHide={variants.data.length > 1} canHide={variants.data.length > 1}
rowStart={1} rowStart={1}
borderTopLeftRadius={isFirst ? 8 : 0} borderTopLeftRadius={isFirst && !shouldFlattenHeader ? 8 : 0}
borderTopRightRadius={isLast ? 8 : 0} borderTopRightRadius={isLast && !shouldFlattenHeader ? 8 : 0}
{...sharedProps} {...sharedProps}
/> />
<GridItem rowStart={2} {...sharedProps}> <GridItem rowStart={2} {...sharedProps}>

View File

@@ -0,0 +1,34 @@
import { useState, useEffect } from "react";

/**
 * Returns true once the `#output-container` element has been scrolled more
 * than `scrollThreshold` pixels from the top, false otherwise.
 *
 * Subscribes to the container's scroll events and removes the listener on
 * unmount (or when the threshold changes). If the container element is not
 * in the DOM, the hook warns and keeps its initial value.
 *
 * @param scrollThreshold scroll offset in pixels that must be exceeded
 */
const useScrolledPast = (scrollThreshold: number) => {
  // Start at false: on mount nothing has been scrolled yet (scrollTop is 0),
  // so a positive threshold cannot have been passed. (Previously initialized
  // to true, which briefly reported "scrolled" before the first check ran.)
  const [hasScrolledPast, setHasScrolledPast] = useState(false);

  useEffect(() => {
    const container = document.getElementById("output-container");
    if (!container) {
      // Keep this id in sync with the getElementById call above — the old
      // message referred to "outputs-container" and didn't match the lookup.
      console.warn('Element with id "output-container" not found.');
      return;
    }

    const checkScroll = () => {
      // Strictly greater than: exactly at the threshold does not count.
      setHasScrolledPast(container.scrollTop > scrollThreshold);
    };

    // Run once immediately in case the container is already scrolled.
    checkScroll();
    container.addEventListener("scroll", checkScroll);

    // Cleanup on unmount / re-subscribe.
    return () => {
      container.removeEventListener("scroll", checkScroll);
    };
    // Previously the dependency array was empty, so a changed threshold was
    // silently ignored by the stale checkScroll closure.
  }, [scrollThreshold]);

  return hasScrolledPast;
};

export default useScrolledPast;

View File

@@ -14,21 +14,11 @@ import { formatTimePast } from "~/utils/dayjs";
import Link from "next/link"; import Link from "next/link";
import { useRouter } from "next/router"; import { useRouter } from "next/router";
import { BsPlusSquare } from "react-icons/bs"; import { BsPlusSquare } from "react-icons/bs";
import { api } from "~/utils/api"; import { RouterOutputs, api } from "~/utils/api";
import { useHandledAsyncCallback } from "~/utils/hooks"; import { useHandledAsyncCallback } from "~/utils/hooks";
import { useAppStore } from "~/state/store"; import { useAppStore } from "~/state/store";
type ExperimentData = { export const ExperimentCard = ({ exp }: { exp: RouterOutputs["experiments"]["list"][0] }) => {
testScenarioCount: number;
promptVariantCount: number;
id: string;
label: string;
sortIndex: number;
createdAt: Date;
updatedAt: Date;
};
export const ExperimentCard = ({ exp }: { exp: ExperimentData }) => {
return ( return (
<Card <Card
w="full" w="full"
@@ -45,7 +35,7 @@ export const ExperimentCard = ({ exp }: { exp: ExperimentData }) => {
as={Link} as={Link}
w="full" w="full"
h="full" h="full"
href={{ pathname: "/experiments/[id]", query: { id: exp.id } }} href={{ pathname: "/experiments/[experimentSlug]", query: { experimentSlug: exp.slug } }}
justify="space-between" justify="space-between"
> >
<HStack w="full" color="gray.700" justify="center"> <HStack w="full" color="gray.700" justify="center">
@@ -89,8 +79,8 @@ export const NewExperimentCard = () => {
projectId: selectedProjectId ?? "", projectId: selectedProjectId ?? "",
}); });
await router.push({ await router.push({
pathname: "/experiments/[id]", pathname: "/experiments/[experimentSlug]",
query: { id: newExperiment.id }, query: { experimentSlug: newExperiment.slug },
}); });
}, [createMutation, router, selectedProjectId]); }, [createMutation, router, selectedProjectId]);

View File

@@ -16,11 +16,14 @@ export const useOnForkButtonPressed = () => {
const [onFork, isForking] = useHandledAsyncCallback(async () => { const [onFork, isForking] = useHandledAsyncCallback(async () => {
if (!experiment.data?.id || !selectedProjectId) return; if (!experiment.data?.id || !selectedProjectId) return;
const forkedExperimentId = await forkMutation.mutateAsync({ const newExperiment = await forkMutation.mutateAsync({
id: experiment.data.id, id: experiment.data.id,
projectId: selectedProjectId, projectId: selectedProjectId,
}); });
await router.push({ pathname: "/experiments/[id]", query: { id: forkedExperimentId } }); await router.push({
pathname: "/experiments/[experimentSlug]",
query: { experimentSlug: newExperiment.slug },
});
}, [forkMutation, experiment.data?.id, router]); }, [forkMutation, experiment.data?.id, router]);
const onForkButtonPressed = useCallback(() => { const onForkButtonPressed = useCallback(() => {

View File

@@ -67,7 +67,13 @@ export default function ProjectMenu() {
); );
return ( return (
<VStack w="full" alignItems="flex-start" spacing={0} py={1}> <VStack
w="full"
alignItems="flex-start"
spacing={0}
py={1}
zIndex={popover.isOpen ? "dropdown" : undefined}
>
<Popover <Popover
placement="bottom" placement="bottom"
isOpen={popover.isOpen} isOpen={popover.isOpen}

View File

@@ -26,6 +26,10 @@ export const env = createEnv({
SMTP_PORT: z.string().default("placeholder"), SMTP_PORT: z.string().default("placeholder"),
SMTP_LOGIN: z.string().default("placeholder"), SMTP_LOGIN: z.string().default("placeholder"),
SMTP_PASSWORD: z.string().default("placeholder"), SMTP_PASSWORD: z.string().default("placeholder"),
WORKER_CONCURRENCY: z
.string()
.default("10")
.transform((val) => parseInt(val)),
}, },
/** /**
@@ -68,6 +72,7 @@ export const env = createEnv({
SMTP_PORT: process.env.SMTP_PORT, SMTP_PORT: process.env.SMTP_PORT,
SMTP_LOGIN: process.env.SMTP_LOGIN, SMTP_LOGIN: process.env.SMTP_LOGIN,
SMTP_PASSWORD: process.env.SMTP_PASSWORD, SMTP_PASSWORD: process.env.SMTP_PASSWORD,
WORKER_CONCURRENCY: process.env.WORKER_CONCURRENCY,
}, },
/** /**
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation. * Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation.

View File

@@ -12,7 +12,6 @@ export const refinementActions: Record<string, RefinementAction> = {
definePrompt("openai/ChatCompletion", { definePrompt("openai/ChatCompletion", {
model: "gpt-4", model: "gpt-4",
stream: true,
messages: [ messages: [
{ {
role: "system", role: "system",
@@ -29,7 +28,6 @@ export const refinementActions: Record<string, RefinementAction> = {
definePrompt("openai/ChatCompletion", { definePrompt("openai/ChatCompletion", {
model: "gpt-4", model: "gpt-4",
stream: true,
messages: [ messages: [
{ {
role: "system", role: "system",
@@ -126,7 +124,6 @@ export const refinementActions: Record<string, RefinementAction> = {
definePrompt("openai/ChatCompletion", { definePrompt("openai/ChatCompletion", {
model: "gpt-4", model: "gpt-4",
stream: true,
messages: [ messages: [
{ {
role: "system", role: "system",
@@ -143,7 +140,6 @@ export const refinementActions: Record<string, RefinementAction> = {
definePrompt("openai/ChatCompletion", { definePrompt("openai/ChatCompletion", {
model: "gpt-4", model: "gpt-4",
stream: true,
messages: [ messages: [
{ {
role: "system", role: "system",
@@ -237,7 +233,6 @@ export const refinementActions: Record<string, RefinementAction> = {
definePrompt("openai/ChatCompletion", { definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo", model: "gpt-3.5-turbo",
stream: true,
messages: [ messages: [
{ {
role: "system", role: "system",

View File

@@ -3,10 +3,12 @@ import { type FrontendModelProvider } from "../types";
import { refinementActions } from "./refinementActions"; import { refinementActions } from "./refinementActions";
import { import {
templateOpenOrcaPrompt, templateOpenOrcaPrompt,
// templateAlpacaInstructPrompt, templateAlpacaInstructPrompt,
// templateSystemUserAssistantPrompt, // templateSystemUserAssistantPrompt,
templateInstructionInputResponsePrompt, templateInstructionInputResponsePrompt,
templateAiroborosPrompt, templateAiroborosPrompt,
templateGryphePrompt,
templateVicunaPrompt,
} from "./templatePrompt"; } from "./templatePrompt";
const frontendModelProvider: FrontendModelProvider<SupportedModel, OpenpipeChatOutput> = { const frontendModelProvider: FrontendModelProvider<SupportedModel, OpenpipeChatOutput> = {
@@ -22,15 +24,16 @@ const frontendModelProvider: FrontendModelProvider<SupportedModel, OpenpipeChatO
learnMoreUrl: "https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B", learnMoreUrl: "https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B",
templatePrompt: templateOpenOrcaPrompt, templatePrompt: templateOpenOrcaPrompt,
}, },
// "Open-Orca/OpenOrca-Platypus2-13B": { "Open-Orca/OpenOrca-Platypus2-13B": {
// name: "OpenOrca-Platypus2-13B", name: "OpenOrca-Platypus2-13B",
// contextWindow: 4096, contextWindow: 4096,
// pricePerSecond: 0.0003, pricePerSecond: 0.0003,
// speed: "medium", speed: "medium",
// provider: "openpipe/Chat", provider: "openpipe/Chat",
// learnMoreUrl: "https://huggingface.co/Open-Orca/OpenOrca-Platypus2-13B", learnMoreUrl: "https://huggingface.co/Open-Orca/OpenOrca-Platypus2-13B",
// templatePrompt: templateAlpacaInstructPrompt, templatePrompt: templateAlpacaInstructPrompt,
// }, defaultStopTokens: ["</s>"],
},
// "stabilityai/StableBeluga-13B": { // "stabilityai/StableBeluga-13B": {
// name: "StableBeluga-13B", // name: "StableBeluga-13B",
// contextWindow: 4096, // contextWindow: 4096,
@@ -58,6 +61,33 @@ const frontendModelProvider: FrontendModelProvider<SupportedModel, OpenpipeChatO
learnMoreUrl: "https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-2.0", learnMoreUrl: "https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-2.0",
templatePrompt: templateAiroborosPrompt, templatePrompt: templateAiroborosPrompt,
}, },
"lmsys/vicuna-13b-v1.5": {
name: "vicuna-13b-v1.5",
contextWindow: 4096,
pricePerSecond: 0.0003,
speed: "medium",
provider: "openpipe/Chat",
learnMoreUrl: "https://huggingface.co/lmsys/vicuna-13b-v1.5",
templatePrompt: templateVicunaPrompt,
},
"Gryphe/MythoMax-L2-13b": {
name: "MythoMax-L2-13b",
contextWindow: 4096,
pricePerSecond: 0.0003,
speed: "medium",
provider: "openpipe/Chat",
learnMoreUrl: "https://huggingface.co/Gryphe/MythoMax-L2-13b",
templatePrompt: templateGryphePrompt,
},
"NousResearch/Nous-Hermes-llama-2-7b": {
name: "Nous-Hermes-llama-2-7b",
contextWindow: 4096,
pricePerSecond: 0.0003,
speed: "medium",
provider: "openpipe/Chat",
learnMoreUrl: "https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b",
templatePrompt: templateInstructionInputResponsePrompt,
},
}, },
refinementActions, refinementActions,

View File

@@ -8,10 +8,13 @@ import frontendModelProvider from "./frontend";
const modelEndpoints: Record<OpenpipeChatInput["model"], string> = { const modelEndpoints: Record<OpenpipeChatInput["model"], string> = {
"Open-Orca/OpenOrcaxOpenChat-Preview2-13B": "https://5ef82gjxk8kdys-8000.proxy.runpod.net/v1", "Open-Orca/OpenOrcaxOpenChat-Preview2-13B": "https://5ef82gjxk8kdys-8000.proxy.runpod.net/v1",
// "Open-Orca/OpenOrca-Platypus2-13B": "https://lt5qlel6qcji8t-8000.proxy.runpod.net/v1", "Open-Orca/OpenOrca-Platypus2-13B": "https://lt5qlel6qcji8t-8000.proxy.runpod.net/v1",
// "stabilityai/StableBeluga-13B": "https://vcorl8mxni2ou1-8000.proxy.runpod.net/v1", // "stabilityai/StableBeluga-13B": "https://vcorl8mxni2ou1-8000.proxy.runpod.net/v1",
"NousResearch/Nous-Hermes-Llama2-13b": "https://ncv8pw3u0vb8j2-8000.proxy.runpod.net/v1", "NousResearch/Nous-Hermes-Llama2-13b": "https://ncv8pw3u0vb8j2-8000.proxy.runpod.net/v1",
"jondurbin/airoboros-l2-13b-gpt4-2.0": "https://9nrbx7oph4btou-8000.proxy.runpod.net/v1", "jondurbin/airoboros-l2-13b-gpt4-2.0": "https://9nrbx7oph4btou-8000.proxy.runpod.net/v1",
"lmsys/vicuna-13b-v1.5": "https://h88hkt3ux73rb7-8000.proxy.runpod.net/v1",
"Gryphe/MythoMax-L2-13b": "https://3l5jvhnxdgky3v-8000.proxy.runpod.net/v1",
"NousResearch/Nous-Hermes-llama-2-7b": "https://ua1bpc6kv3dgge-8000.proxy.runpod.net/v1",
}; };
export async function getCompletion( export async function getCompletion(
@@ -36,10 +39,20 @@ export async function getCompletion(
const start = Date.now(); const start = Date.now();
let finalCompletion: OpenpipeChatOutput = ""; let finalCompletion: OpenpipeChatOutput = "";
const completionParams = {
model,
prompt: templatedPrompt,
...rest,
};
if (!completionParams.stop && frontendModelProvider.models[model].defaultStopTokens) {
completionParams.stop = frontendModelProvider.models[model].defaultStopTokens;
}
try { try {
if (onStream) { if (onStream) {
const resp = await openai.completions.create( const resp = await openai.completions.create(
{ model, prompt: templatedPrompt, ...rest, stream: true }, { ...completionParams, stream: true },
{ {
maxRetries: 0, maxRetries: 0,
}, },
@@ -58,7 +71,7 @@ export async function getCompletion(
} }
} else { } else {
const resp = await openai.completions.create( const resp = await openai.completions.create(
{ model, prompt: templatedPrompt, ...rest, stream: false }, { ...completionParams, stream: false },
{ {
maxRetries: 0, maxRetries: 0,
}, },

View File

@@ -6,10 +6,13 @@ import frontendModelProvider from "./frontend";
const supportedModels = [ const supportedModels = [
"Open-Orca/OpenOrcaxOpenChat-Preview2-13B", "Open-Orca/OpenOrcaxOpenChat-Preview2-13B",
// "Open-Orca/OpenOrca-Platypus2-13B", "Open-Orca/OpenOrca-Platypus2-13B",
// "stabilityai/StableBeluga-13B", // "stabilityai/StableBeluga-13B",
"NousResearch/Nous-Hermes-Llama2-13b", "NousResearch/Nous-Hermes-Llama2-13b",
"jondurbin/airoboros-l2-13b-gpt4-2.0", "jondurbin/airoboros-l2-13b-gpt4-2.0",
"lmsys/vicuna-13b-v1.5",
"Gryphe/MythoMax-L2-13b",
"NousResearch/Nous-Hermes-llama-2-7b",
] as const; ] as const;
export type SupportedModel = (typeof supportedModels)[number]; export type SupportedModel = (typeof supportedModels)[number];

View File

@@ -7,8 +7,12 @@
"type": "string", "type": "string",
"enum": [ "enum": [
"Open-Orca/OpenOrcaxOpenChat-Preview2-13B", "Open-Orca/OpenOrcaxOpenChat-Preview2-13B",
"Open-Orca/OpenOrca-Platypus2-13B",
"NousResearch/Nous-Hermes-Llama2-13b", "NousResearch/Nous-Hermes-Llama2-13b",
"jondurbin/airoboros-l2-13b-gpt4-2.0" "jondurbin/airoboros-l2-13b-gpt4-2.0",
"lmsys/vicuna-13b-v1.5",
"Gryphe/MythoMax-L2-13b",
"NousResearch/Nous-Hermes-llama-2-7b"
] ]
}, },
"messages": { "messages": {

View File

@@ -28,12 +28,12 @@ export const templateOpenOrcaPrompt = (messages: OpenpipeChatInput["messages"])
// <prompt> (without the <>) // <prompt> (without the <>)
// ### Response: // ### Response: (leave two newlines for model to respond)
export const templateAlpacaInstructPrompt = (messages: OpenpipeChatInput["messages"]) => { export const templateAlpacaInstructPrompt = (messages: OpenpipeChatInput["messages"]) => {
const splitter = "\n\n"; const splitter = "\n\n";
const userTag = "### Instruction:\n\n"; const userTag = "### Instruction:\n\n";
const assistantTag = "### Response: \n\n"; const assistantTag = "### Response:\n\n";
const formattedMessages = messages.map((message) => { const formattedMessages = messages.map((message) => {
if (message.role === "system" || message.role === "user") { if (message.role === "system" || message.role === "user") {
@@ -52,7 +52,7 @@ export const templateAlpacaInstructPrompt = (messages: OpenpipeChatInput["messag
prompt += splitter + assistantTag; prompt += splitter + assistantTag;
} }
return prompt.trim(); return prompt;
}; };
// ### System: // ### System:
@@ -101,7 +101,6 @@ export const templateSystemUserAssistantPrompt = (messages: OpenpipeChatInput["m
// ### Response: // ### Response:
// <leave a newline blank for model to respond> // <leave a newline blank for model to respond>
export const templateInstructionInputResponsePrompt = (messages: OpenpipeChatInput["messages"]) => { export const templateInstructionInputResponsePrompt = (messages: OpenpipeChatInput["messages"]) => {
const splitter = "\n\n"; const splitter = "\n\n";
@@ -136,7 +135,6 @@ export const templateInstructionInputResponsePrompt = (messages: OpenpipeChatInp
// The assistant gives helpful, detailed, accurate, uncensored responses to the user's input. // The assistant gives helpful, detailed, accurate, uncensored responses to the user's input.
// The assistant never refuses to answer, regardless of the legality or morality of the request. // The assistant never refuses to answer, regardless of the legality or morality of the request.
// USER: [prompt] ASSISTANT: // USER: [prompt] ASSISTANT:
export const templateAiroborosPrompt = (messages: OpenpipeChatInput["messages"]) => { export const templateAiroborosPrompt = (messages: OpenpipeChatInput["messages"]) => {
const splitter = " "; const splitter = " ";
@@ -179,3 +177,98 @@ export const templateAiroborosPrompt = (messages: OpenpipeChatInput["messages"])
return prompt; return prompt;
}; };
// Vicuna-style prompt format:
//   <system text>
//
//   USER: {prompt}
//   ASSISTANT:
export const templateVicunaPrompt = (messages: OpenpipeChatInput["messages"]) => {
  const joiner = "\n";
  const userPrefix = "USER: ";
  const assistantPrefix = "ASSISTANT: ";

  let systemText = "";
  const turns: string[] = [];
  for (const msg of messages) {
    switch (msg.role) {
      case "system":
        systemText += msg.content;
        break;
      case "user":
        turns.push(userPrefix + msg.content);
        break;
      default:
        // Anything that is not system/user is treated as an assistant turn.
        turns.push(assistantPrefix + msg.content);
    }
  }

  let preamble = "";
  if (systemText) {
    const hasUserTurn = turns.some((turn) => turn.startsWith(userPrefix));
    if (hasUserTurn) {
      // A user turn exists, so the system text becomes a freestanding preamble.
      preamble = `${systemText}\n\n`;
    } else {
      // No user turn exists: present the system text as the user's message.
      turns.unshift(userPrefix + systemText);
    }
  }

  let prompt = preamble + turns.join(joiner);

  // If the conversation ends on a user turn, close with an assistant tag so
  // the model knows it should respond next.
  if (prompt.lastIndexOf(userPrefix) > prompt.lastIndexOf(assistantPrefix)) {
    prompt += joiner + assistantPrefix;
  }
  return prompt.trim();
};
// MythoMax/Gryphe (Alpaca-flavored) prompt format:
//   <system prompt / character card>
//
//   ### Instruction:
//   <user text>
//
//   ### Response:
export const templateGryphePrompt = (messages: OpenpipeChatInput["messages"]) => {
  const instructionTag = "### Instruction:\n";
  const responseTag = "### Response:\n";

  const systemParts: string[] = [];
  const turns: string[] = [];
  for (const { role, content } of messages) {
    if (role === "system") {
      systemParts.push(content);
    } else if (role === "user") {
      turns.push(instructionTag + content);
    } else {
      // Non-system, non-user messages are assistant turns.
      turns.push(responseTag + content);
    }
  }

  const systemText = systemParts.join("");
  let header = "";
  if (systemText) {
    if (turns.some((turn) => turn.startsWith(instructionTag))) {
      // A user turn exists, so the system text becomes a standalone header.
      header = `${systemText}\n\n`;
    } else {
      // No user turn exists: deliver the system text as the instruction.
      turns.unshift(instructionTag + systemText);
    }
  }

  let prompt = header + turns.join("\n\n");

  // Ensure the prompt ends ready for the model to respond: if the last turn
  // is an instruction, append an empty response tag.
  const endsOnInstruction = prompt.lastIndexOf(instructionTag) > prompt.lastIndexOf(responseTag);
  if (endsOnInstruction) {
    prompt += "\n\n" + responseTag;
  }
  return prompt;
};

View File

@@ -25,6 +25,7 @@ export type Model = {
learnMoreUrl?: string; learnMoreUrl?: string;
apiDocsUrl?: string; apiDocsUrl?: string;
templatePrompt?: (initialPrompt: OpenpipeChatInput["messages"]) => string; templatePrompt?: (initialPrompt: OpenpipeChatInput["messages"]) => string;
defaultStopTokens?: string[];
}; };
export type ProviderModel = { provider: z.infer<typeof ZodSupportedProvider>; model: string }; export type ProviderModel = { provider: z.infer<typeof ZodSupportedProvider>; model: string };

View File

@@ -0,0 +1,54 @@
import { Card, Table, Tbody, Td, Th, Thead, Tr } from "@chakra-ui/react";
import dayjs from "dayjs";
import { isDate, isObject, isString } from "lodash-es";
import AppShell from "~/components/nav/AppShell";
import { type RouterOutputs, api } from "~/utils/api";
// Columns shown in the admin jobs table, in display order.
const fieldsToShow: (keyof RouterOutputs["adminJobs"]["list"][0])[] = [
  "id",
  "queue_name",
  "payload",
  "priority",
  "attempts",
  "last_error",
  "created_at",
  "key",
  "locked_at",
  "run_at",
];

/**
 * Admin-only dashboard page that renders the most recent background jobs
 * returned by the `adminJobs.list` endpoint as a simple table.
 */
export default function Jobs() {
  const jobs = api.adminJobs.list.useQuery({});

  return (
    <AppShell title="Admin Jobs">
      <Card m={4} overflowX="auto">
        <Table>
          <Thead>
            <Tr>
              {fieldsToShow.map((field) => (
                <Th key={field}>{field}</Th>
              ))}
            </Tr>
          </Thead>
          <Tbody>
            {jobs.data?.map((job) => (
              <Tr key={job.id}>
                {fieldsToShow.map((field) => {
                  let display = job[field];
                  if (isDate(display)) {
                    // Timestamps render as "YYYY-MM-DD HH:mm:ss".
                    display = dayjs(display).format("YYYY-MM-DD HH:mm:ss");
                  } else if (isObject(display) && !isString(display)) {
                    // Serialize JSON columns (e.g. payload, flags) for display.
                    display = JSON.stringify(display);
                  }
                  return <Td key={field}>{display}</Td>;
                })}
              </Tr>
            ))}
          </Tbody>
        </Table>
      </Card>
    </AppShell>
  );
}

View File

@@ -33,9 +33,9 @@ export default function Experiment() {
const experiment = useExperiment(); const experiment = useExperiment();
const experimentStats = api.experiments.stats.useQuery( const experimentStats = api.experiments.stats.useQuery(
{ id: router.query.id as string }, { id: experiment.data?.id as string },
{ {
enabled: !!router.query.id, enabled: !!experiment.data?.id,
}, },
); );
const stats = experimentStats.data; const stats = experimentStats.data;
@@ -124,8 +124,8 @@ export default function Experiment() {
<ExperimentHeaderButtons /> <ExperimentHeaderButtons />
</PageHeaderContainer> </PageHeaderContainer>
<ExperimentSettingsDrawer /> <ExperimentSettingsDrawer />
<Box w="100%" overflowX="auto" flex={1}> <Box w="100%" overflowX="auto" flex={1} id="output-container">
<OutputsTable experimentId={router.query.id as string | undefined} /> <OutputsTable experimentId={experiment.data?.id} />
</Box> </Box>
</VStack> </VStack>
</AppShell> </AppShell>

View File

@@ -66,7 +66,7 @@ export const v1ApiRouter = createOpenApiRouter({
if (!existingResponse) return { respPayload: null }; if (!existingResponse) return { respPayload: null };
await prisma.loggedCall.create({ const newCall = await prisma.loggedCall.create({
data: { data: {
projectId: ctx.key.projectId, projectId: ctx.key.projectId,
requestedAt: new Date(input.requestedAt), requestedAt: new Date(input.requestedAt),
@@ -75,11 +75,7 @@ export const v1ApiRouter = createOpenApiRouter({
}, },
}); });
await createTags( await createTags(newCall.projectId, newCall.id, input.tags);
existingResponse.originalLoggedCall.projectId,
existingResponse.originalLoggedCallId,
input.tags,
);
return { return {
respPayload: existingResponse.respPayload, respPayload: existingResponse.respPayload,
}; };
@@ -111,7 +107,7 @@ export const v1ApiRouter = createOpenApiRouter({
.default({}), .default({}),
}), }),
) )
.output(z.object({ status: z.literal("ok") })) .output(z.object({ status: z.union([z.literal("ok"), z.literal("error")]) }))
.mutation(async ({ input, ctx }) => { .mutation(async ({ input, ctx }) => {
const reqPayload = await reqValidator.spa(input.reqPayload); const reqPayload = await reqValidator.spa(input.reqPayload);
const respPayload = await respValidator.spa(input.respPayload); const respPayload = await respValidator.spa(input.respPayload);
@@ -212,6 +208,7 @@ export const v1ApiRouter = createOpenApiRouter({
createdAt: true, createdAt: true,
cacheHit: true, cacheHit: true,
tags: true, tags: true,
id: true,
modelResponse: { modelResponse: {
select: { select: {
id: true, id: true,
@@ -237,7 +234,7 @@ async function createTags(projectId: string, loggedCallId: string, tags: Record<
const tagsToCreate = Object.entries(tags).map(([name, value]) => ({ const tagsToCreate = Object.entries(tags).map(([name, value]) => ({
projectId, projectId,
loggedCallId, loggedCallId,
name: name.replaceAll(/[^a-zA-Z0-9_$]/g, "_"), name: name.replaceAll(/[^a-zA-Z0-9_$.]/g, "_"),
value, value,
})); }));
await prisma.loggedCallTag.createMany({ await prisma.loggedCallTag.createMany({

View File

@@ -12,6 +12,7 @@ import { projectsRouter } from "./routers/projects.router";
import { dashboardRouter } from "./routers/dashboard.router"; import { dashboardRouter } from "./routers/dashboard.router";
import { loggedCallsRouter } from "./routers/loggedCalls.router"; import { loggedCallsRouter } from "./routers/loggedCalls.router";
import { usersRouter } from "./routers/users.router"; import { usersRouter } from "./routers/users.router";
import { adminJobsRouter } from "./routers/adminJobs.router";
/** /**
* This is the primary router for your server. * This is the primary router for your server.
@@ -32,6 +33,7 @@ export const appRouter = createTRPCRouter({
dashboard: dashboardRouter, dashboard: dashboardRouter,
loggedCalls: loggedCallsRouter, loggedCalls: loggedCallsRouter,
users: usersRouter, users: usersRouter,
adminJobs: adminJobsRouter,
}); });
// export type definition of API // export type definition of API

View File

@@ -0,0 +1,18 @@
import { z } from "zod";
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { kysely } from "~/server/db";
import { requireIsAdmin } from "~/utils/accessControl";
/**
 * tRPC router for site-admin inspection of the graphile-worker job table.
 */
export const adminJobsRouter = createTRPCRouter({
  // Returns the 100 most recently created background jobs.
  // Restricted to site admins via requireIsAdmin.
  list: protectedProcedure.input(z.object({})).query(async ({ ctx }) => {
    await requireIsAdmin(ctx);

    const recentJobsQuery = kysely
      .selectFrom("graphile_worker.jobs")
      .limit(100)
      .selectAll()
      .orderBy("created_at", "desc");

    return await recentJobsQuery.execute();
  }),
});

View File

@@ -85,15 +85,16 @@ export const experimentsRouter = createTRPCRouter({
return experimentsWithCounts; return experimentsWithCounts;
}), }),
get: publicProcedure.input(z.object({ id: z.string() })).query(async ({ input, ctx }) => { get: publicProcedure.input(z.object({ slug: z.string() })).query(async ({ input, ctx }) => {
await requireCanViewExperiment(input.id, ctx);
const experiment = await prisma.experiment.findFirstOrThrow({ const experiment = await prisma.experiment.findFirstOrThrow({
where: { id: input.id }, where: { slug: input.slug },
include: { include: {
project: true, project: true,
}, },
}); });
await requireCanViewExperiment(experiment.id, ctx);
const canModify = ctx.session?.user.id const canModify = ctx.session?.user.id
? await canModifyExperiment(experiment.id, ctx.session?.user.id) ? await canModifyExperiment(experiment.id, ctx.session?.user.id)
: false; : false;
@@ -177,6 +178,7 @@ export const experimentsRouter = createTRPCRouter({
existingToNewVariantIds.set(variant.id, newVariantId); existingToNewVariantIds.set(variant.id, newVariantId);
variantsToCreate.push({ variantsToCreate.push({
...variant, ...variant,
uiId: uuidv4(),
id: newVariantId, id: newVariantId,
experimentId: newExperimentId, experimentId: newExperimentId,
}); });
@@ -190,6 +192,7 @@ export const experimentsRouter = createTRPCRouter({
scenariosToCreate.push({ scenariosToCreate.push({
...scenario, ...scenario,
id: newScenarioId, id: newScenarioId,
uiId: uuidv4(),
experimentId: newExperimentId, experimentId: newExperimentId,
variableValues: scenario.variableValues as Prisma.InputJsonValue, variableValues: scenario.variableValues as Prisma.InputJsonValue,
}); });
@@ -290,7 +293,10 @@ export const experimentsRouter = createTRPCRouter({
}), }),
]); ]);
return newExperimentId; const newExperiment = await prisma.experiment.findUniqueOrThrow({
where: { id: newExperimentId },
});
return newExperiment;
}), }),
create: protectedProcedure create: protectedProcedure
@@ -335,7 +341,6 @@ export const experimentsRouter = createTRPCRouter({
definePrompt("openai/ChatCompletion", { definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo-0613", model: "gpt-3.5-turbo-0613",
stream: true,
messages: [ messages: [
{ {
role: "system", role: "system",

View File

@@ -1,27 +1,6 @@
import { import { type DB } from "./db.types";
type Experiment,
type PromptVariant, import { PrismaClient } from "@prisma/client";
type TestScenario,
type TemplateVariable,
type ScenarioVariantCell,
type ModelResponse,
type Evaluation,
type OutputEvaluation,
type Dataset,
type DatasetEntry,
type Project,
type ProjectUser,
type WorldChampEntrant,
type LoggedCall,
type LoggedCallModelResponse,
type LoggedCallTag,
type ApiKey,
type Account,
type Session,
type User,
type VerificationToken,
PrismaClient,
} from "@prisma/client";
import { Kysely, PostgresDialect } from "kysely"; import { Kysely, PostgresDialect } from "kysely";
// TODO: Revert to normal import when our tsconfig.json is fixed // TODO: Revert to normal import when our tsconfig.json is fixed
// import { Pool } from "pg"; // import { Pool } from "pg";
@@ -32,30 +11,6 @@ const Pool = (UntypedPool.default ? UntypedPool.default : UntypedPool) as typeof
import { env } from "~/env.mjs"; import { env } from "~/env.mjs";
interface DB {
Experiment: Experiment;
PromptVariant: PromptVariant;
TestScenario: TestScenario;
TemplateVariable: TemplateVariable;
ScenarioVariantCell: ScenarioVariantCell;
ModelResponse: ModelResponse;
Evaluation: Evaluation;
OutputEvaluation: OutputEvaluation;
Dataset: Dataset;
DatasetEntry: DatasetEntry;
Project: Project;
ProjectUser: ProjectUser;
WorldChampEntrant: WorldChampEntrant;
LoggedCall: LoggedCall;
LoggedCallModelResponse: LoggedCallModelResponse;
LoggedCallTag: LoggedCallTag;
ApiKey: ApiKey;
Account: Account;
Session: Session;
User: User;
VerificationToken: VerificationToken;
}
const globalForPrisma = globalThis as unknown as { const globalForPrisma = globalThis as unknown as {
prisma: PrismaClient | undefined; prisma: PrismaClient | undefined;
}; };

336
app/src/server/db.types.ts Normal file
View File

@@ -0,0 +1,336 @@
import type { ColumnType } from "kysely";
// Kysely column-type helpers and per-table row interfaces for the app
// database (Prisma-managed tables plus the graphile_worker schema).
// NOTE(review): this follows kysely-codegen output conventions — presumably
// generated; confirm before hand-editing individual interfaces.

// Marks a column the database can populate itself (default/identity):
// optional on insert, present with type S on select.
export type Generated<T> = T extends ColumnType<infer S, infer I, infer U>
  ? ColumnType<S, I | undefined, U>
  : ColumnType<T, T | undefined, T>;
// Postgres bigint: selected as string, accepts string/number/bigint on write.
export type Int8 = ColumnType<string, string | number | bigint, string | number | bigint>;
// JSON column: parsed value on select, serialized string on insert/update.
export type Json = ColumnType<JsonValue, string, string>;
export type JsonArray = JsonValue[];
export type JsonObject = {
  [K in string]?: JsonValue;
};
export type JsonPrimitive = boolean | null | number | string;
export type JsonValue = JsonArray | JsonObject | JsonPrimitive;
// Postgres numeric: selected as string to preserve precision.
export type Numeric = ColumnType<string, string | number, string | number>;
export type Timestamp = ColumnType<Date, Date | string, Date | string>;
// Prisma's internal migration bookkeeping table.
export interface _PrismaMigrations {
  id: string;
  checksum: string;
  finished_at: Timestamp | null;
  migration_name: string;
  logs: string | null;
  rolled_back_at: Timestamp | null;
  started_at: Generated<Timestamp>;
  applied_steps_count: Generated<number>;
}
// NextAuth OAuth account linkage.
export interface Account {
  id: string;
  userId: string;
  type: string;
  provider: string;
  providerAccountId: string;
  refresh_token: string | null;
  refresh_token_expires_in: number | null;
  access_token: string | null;
  expires_at: number | null;
  token_type: string | null;
  scope: string | null;
  id_token: string | null;
  session_state: string | null;
}
export interface ApiKey {
  id: string;
  name: string;
  apiKey: string;
  projectId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
}
export interface Dataset {
  id: string;
  name: string;
  projectId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
}
export interface DatasetEntry {
  id: string;
  input: string;
  output: string | null;
  datasetId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
}
export interface Evaluation {
  id: string;
  label: string;
  value: string;
  evalType: "CONTAINS" | "DOES_NOT_CONTAIN" | "GPT4_EVAL";
  experimentId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
}
export interface Experiment {
  id: string;
  label: string;
  sortIndex: Generated<number>;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
  projectId: string;
}
// graphile-worker internal table: one row per named queue.
export interface GraphileWorkerJobQueues {
  queue_name: string;
  job_count: number;
  locked_at: Timestamp | null;
  locked_by: string | null;
}
// graphile-worker internal table: one row per queued/background job.
export interface GraphileWorkerJobs {
  id: Generated<Int8>;
  queue_name: string | null;
  task_identifier: string;
  payload: Generated<Json>;
  priority: Generated<number>;
  run_at: Generated<Timestamp>;
  attempts: Generated<number>;
  max_attempts: Generated<number>;
  last_error: string | null;
  created_at: Generated<Timestamp>;
  updated_at: Generated<Timestamp>;
  key: string | null;
  locked_at: Timestamp | null;
  locked_by: string | null;
  revision: Generated<number>;
  flags: Json | null;
}
export interface GraphileWorkerKnownCrontabs {
  identifier: string;
  known_since: Timestamp;
  last_execution: Timestamp | null;
}
export interface GraphileWorkerMigrations {
  id: number;
  ts: Generated<Timestamp>;
}
export interface LoggedCall {
  id: string;
  requestedAt: Timestamp;
  cacheHit: boolean;
  modelResponseId: string | null;
  projectId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
  model: string | null;
}
export interface LoggedCallModelResponse {
  id: string;
  reqPayload: Json;
  statusCode: number | null;
  respPayload: Json | null;
  errorMessage: string | null;
  requestedAt: Timestamp;
  receivedAt: Timestamp;
  cacheKey: string | null;
  durationMs: number | null;
  inputTokens: number | null;
  outputTokens: number | null;
  finishReason: string | null;
  completionId: string | null;
  // Numeric (string-typed) to preserve monetary precision.
  cost: Numeric | null;
  originalLoggedCallId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
}
export interface LoggedCallTag {
  id: string;
  name: string;
  value: string | null;
  loggedCallId: string;
  projectId: string;
}
export interface ModelResponse {
  id: string;
  cacheKey: string;
  respPayload: Json | null;
  inputTokens: number | null;
  outputTokens: number | null;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
  scenarioVariantCellId: string;
  cost: number | null;
  requestedAt: Timestamp | null;
  receivedAt: Timestamp | null;
  statusCode: number | null;
  errorMessage: string | null;
  retryTime: Timestamp | null;
  outdated: Generated<boolean>;
}
export interface OutputEvaluation {
  id: string;
  result: number;
  details: string | null;
  modelResponseId: string;
  evaluationId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
}
export interface Project {
  id: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
  personalProjectUserId: string | null;
  name: Generated<string>;
}
export interface ProjectUser {
  id: string;
  role: "ADMIN" | "MEMBER" | "VIEWER";
  projectId: string;
  userId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
}
export interface PromptVariant {
  id: string;
  label: string;
  uiId: string;
  visible: Generated<boolean>;
  sortIndex: Generated<number>;
  experimentId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
  promptConstructor: string;
  model: string;
  promptConstructorVersion: number;
  modelProvider: string;
}
export interface ScenarioVariantCell {
  id: string;
  errorMessage: string | null;
  promptVariantId: string;
  testScenarioId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
  retrievalStatus: Generated<"COMPLETE" | "ERROR" | "IN_PROGRESS" | "PENDING">;
  prompt: Json | null;
  jobQueuedAt: Timestamp | null;
  jobStartedAt: Timestamp | null;
}
export interface Session {
  id: string;
  sessionToken: string;
  userId: string;
  expires: Timestamp;
}
export interface TemplateVariable {
  id: string;
  label: string;
  experimentId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
}
export interface TestScenario {
  id: string;
  variableValues: Json;
  uiId: string;
  visible: Generated<boolean>;
  sortIndex: Generated<number>;
  experimentId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
}
export interface User {
  id: string;
  name: string | null;
  email: string | null;
  emailVerified: Timestamp | null;
  image: string | null;
  createdAt: Generated<Timestamp>;
  updatedAt: Generated<Timestamp>;
  role: Generated<"ADMIN" | "USER">;
}
export interface UserInvitation {
  id: string;
  projectId: string;
  email: string;
  role: "ADMIN" | "MEMBER" | "VIEWER";
  invitationToken: string;
  senderId: string;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
}
export interface VerificationToken {
  identifier: string;
  token: string;
  expires: Timestamp;
}
export interface WorldChampEntrant {
  id: string;
  userId: string;
  approved: Generated<boolean>;
  createdAt: Generated<Timestamp>;
  updatedAt: Timestamp;
}
// Master table map consumed by Kysely<DB>; keys are the exact table names,
// including schema-qualified graphile_worker tables.
export interface DB {
  _prisma_migrations: _PrismaMigrations;
  Account: Account;
  ApiKey: ApiKey;
  Dataset: Dataset;
  DatasetEntry: DatasetEntry;
  Evaluation: Evaluation;
  Experiment: Experiment;
  "graphile_worker.job_queues": GraphileWorkerJobQueues;
  "graphile_worker.jobs": GraphileWorkerJobs;
  "graphile_worker.known_crontabs": GraphileWorkerKnownCrontabs;
  "graphile_worker.migrations": GraphileWorkerMigrations;
  LoggedCall: LoggedCall;
  LoggedCallModelResponse: LoggedCallModelResponse;
  LoggedCallTag: LoggedCallTag;
  ModelResponse: ModelResponse;
  OutputEvaluation: OutputEvaluation;
  Project: Project;
  ProjectUser: ProjectUser;
  PromptVariant: PromptVariant;
  ScenarioVariantCell: ScenarioVariantCell;
  Session: Session;
  TemplateVariable: TemplateVariable;
  TestScenario: TestScenario;
  User: User;
  UserInvitation: UserInvitation;
  VerificationToken: VerificationToken;
  WorldChampEntrant: WorldChampEntrant;
}

View File

@@ -1,19 +0,0 @@
import "dotenv/config";
import { openai } from "../utils/openai";
const resp = await openai.chat.completions.create({
model: "gpt-3.5-turbo-0613",
stream: true,
messages: [
{
role: "user",
content: "count to 20",
},
],
});
for await (const part of resp) {
console.log("part", part);
}
console.log("final resp", resp);

View File

@@ -1,15 +1,26 @@
// Import necessary dependencies import { type Helpers, type Task, makeWorkerUtils, TaskSpec } from "graphile-worker";
import { quickAddJob, type Helpers, type Task } from "graphile-worker";
import { env } from "~/env.mjs"; import { env } from "~/env.mjs";
// Define the defineTask function let workerUtilsPromise: ReturnType<typeof makeWorkerUtils> | null = null;
function workerUtils() {
if (!workerUtilsPromise) {
workerUtilsPromise = makeWorkerUtils({
connectionString: env.DATABASE_URL,
});
}
return workerUtilsPromise;
}
function defineTask<TPayload>( function defineTask<TPayload>(
taskIdentifier: string, taskIdentifier: string,
taskHandler: (payload: TPayload, helpers: Helpers) => Promise<void>, taskHandler: (payload: TPayload, helpers: Helpers) => Promise<void>,
) { ) {
const enqueue = async (payload: TPayload, runAt?: Date) => { const enqueue = async (payload: TPayload, spec?: TaskSpec) => {
console.log("Enqueuing task", taskIdentifier, payload); console.log("Enqueuing task", taskIdentifier, payload);
await quickAddJob({ connectionString: env.DATABASE_URL }, taskIdentifier, payload, { runAt });
const utils = await workerUtils();
return await utils.addJob(taskIdentifier, payload, spec);
}; };
const handler = (payload: TPayload, helpers: Helpers) => { const handler = (payload: TPayload, helpers: Helpers) => {

View File

@@ -153,7 +153,7 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
stream, stream,
numPreviousTries: numPreviousTries + 1, numPreviousTries: numPreviousTries + 1,
}, },
retryTime, { runAt: retryTime, jobKey: cellId },
); );
await prisma.scenarioVariantCell.update({ await prisma.scenarioVariantCell.update({
where: { id: cellId }, where: { id: cellId },
@@ -184,6 +184,6 @@ export const queueQueryModel = async (cellId: string, stream: boolean) => {
jobQueuedAt: new Date(), jobQueuedAt: new Date(),
}, },
}), }),
queryModel.enqueue({ cellId, stream, numPreviousTries: 0 }), queryModel.enqueue({ cellId, stream, numPreviousTries: 0 }, { jobKey: cellId }),
]); ]);
}; };

View File

@@ -17,7 +17,7 @@ const taskList = registeredTasks.reduce((acc, task) => {
// Run a worker to execute jobs: // Run a worker to execute jobs:
const runner = await run({ const runner = await run({
connectionString: env.DATABASE_URL, connectionString: env.DATABASE_URL,
concurrency: 10, concurrency: env.WORKER_CONCURRENCY,
// Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc // Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
noHandleSignals: false, noHandleSignals: false,
pollInterval: 1000, pollInterval: 1000,

View File

@@ -17,6 +17,8 @@ export const requireNothing = (ctx: TRPCContext) => {
}; };
export const requireIsProjectAdmin = async (projectId: string, ctx: TRPCContext) => { export const requireIsProjectAdmin = async (projectId: string, ctx: TRPCContext) => {
ctx.markAccessControlRun();
const userId = ctx.session?.user.id; const userId = ctx.session?.user.id;
if (!userId) { if (!userId) {
throw new TRPCError({ code: "UNAUTHORIZED" }); throw new TRPCError({ code: "UNAUTHORIZED" });
@@ -33,11 +35,11 @@ export const requireIsProjectAdmin = async (projectId: string, ctx: TRPCContext)
if (!isAdmin) { if (!isAdmin) {
throw new TRPCError({ code: "UNAUTHORIZED" }); throw new TRPCError({ code: "UNAUTHORIZED" });
} }
ctx.markAccessControlRun();
}; };
export const requireCanViewProject = async (projectId: string, ctx: TRPCContext) => { export const requireCanViewProject = async (projectId: string, ctx: TRPCContext) => {
ctx.markAccessControlRun();
const userId = ctx.session?.user.id; const userId = ctx.session?.user.id;
if (!userId) { if (!userId) {
throw new TRPCError({ code: "UNAUTHORIZED" }); throw new TRPCError({ code: "UNAUTHORIZED" });
@@ -53,11 +55,11 @@ export const requireCanViewProject = async (projectId: string, ctx: TRPCContext)
if (!canView) { if (!canView) {
throw new TRPCError({ code: "UNAUTHORIZED" }); throw new TRPCError({ code: "UNAUTHORIZED" });
} }
ctx.markAccessControlRun();
}; };
export const requireCanModifyProject = async (projectId: string, ctx: TRPCContext) => { export const requireCanModifyProject = async (projectId: string, ctx: TRPCContext) => {
ctx.markAccessControlRun();
const userId = ctx.session?.user.id; const userId = ctx.session?.user.id;
if (!userId) { if (!userId) {
throw new TRPCError({ code: "UNAUTHORIZED" }); throw new TRPCError({ code: "UNAUTHORIZED" });
@@ -74,11 +76,11 @@ export const requireCanModifyProject = async (projectId: string, ctx: TRPCContex
if (!canModify) { if (!canModify) {
throw new TRPCError({ code: "UNAUTHORIZED" }); throw new TRPCError({ code: "UNAUTHORIZED" });
} }
ctx.markAccessControlRun();
}; };
export const requireCanViewDataset = async (datasetId: string, ctx: TRPCContext) => { export const requireCanViewDataset = async (datasetId: string, ctx: TRPCContext) => {
ctx.markAccessControlRun();
const dataset = await prisma.dataset.findFirst({ const dataset = await prisma.dataset.findFirst({
where: { where: {
id: datasetId, id: datasetId,
@@ -96,8 +98,6 @@ export const requireCanViewDataset = async (datasetId: string, ctx: TRPCContext)
if (!dataset) { if (!dataset) {
throw new TRPCError({ code: "UNAUTHORIZED" }); throw new TRPCError({ code: "UNAUTHORIZED" });
} }
ctx.markAccessControlRun();
}; };
export const requireCanModifyDataset = async (datasetId: string, ctx: TRPCContext) => { export const requireCanModifyDataset = async (datasetId: string, ctx: TRPCContext) => {
@@ -105,13 +105,10 @@ export const requireCanModifyDataset = async (datasetId: string, ctx: TRPCContex
await requireCanViewDataset(datasetId, ctx); await requireCanViewDataset(datasetId, ctx);
}; };
export const requireCanViewExperiment = async (experimentId: string, ctx: TRPCContext) => { export const requireCanViewExperiment = (experimentId: string, ctx: TRPCContext): Promise<void> => {
await prisma.experiment.findFirst({
where: { id: experimentId },
});
// Right now all experiments are publicly viewable, so this is a no-op. // Right now all experiments are publicly viewable, so this is a no-op.
ctx.markAccessControlRun(); ctx.markAccessControlRun();
return Promise.resolve();
}; };
export const canModifyExperiment = async (experimentId: string, userId: string) => { export const canModifyExperiment = async (experimentId: string, userId: string) => {
@@ -136,6 +133,8 @@ export const canModifyExperiment = async (experimentId: string, userId: string)
}; };
export const requireCanModifyExperiment = async (experimentId: string, ctx: TRPCContext) => { export const requireCanModifyExperiment = async (experimentId: string, ctx: TRPCContext) => {
ctx.markAccessControlRun();
const userId = ctx.session?.user.id; const userId = ctx.session?.user.id;
if (!userId) { if (!userId) {
throw new TRPCError({ code: "UNAUTHORIZED" }); throw new TRPCError({ code: "UNAUTHORIZED" });
@@ -144,6 +143,17 @@ export const requireCanModifyExperiment = async (experimentId: string, ctx: TRPC
if (!(await canModifyExperiment(experimentId, userId))) { if (!(await canModifyExperiment(experimentId, userId))) {
throw new TRPCError({ code: "UNAUTHORIZED" }); throw new TRPCError({ code: "UNAUTHORIZED" });
} }
};
ctx.markAccessControlRun();
export const requireIsAdmin = async (ctx: TRPCContext) => {
ctx.markAccessControlRun();
const userId = ctx.session?.user.id;
if (!userId) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
if (!(await isAdmin(userId))) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
}; };

View File

@@ -15,8 +15,8 @@ export const useExperiments = () => {
export const useExperiment = () => { export const useExperiment = () => {
const router = useRouter(); const router = useRouter();
const experiment = api.experiments.get.useQuery( const experiment = api.experiments.get.useQuery(
{ id: router.query.id as string }, { slug: router.query.experimentSlug as string },
{ enabled: !!router.query.id }, { enabled: !!router.query.experimentSlug },
); );
return experiment; return experiment;

View File

@@ -141,10 +141,20 @@
"type": "object", "type": "object",
"properties": { "properties": {
"status": { "status": {
"anyOf": [
{
"type": "string", "type": "string",
"enum": [ "enum": [
"ok" "ok"
] ]
},
{
"type": "string",
"enum": [
"error"
]
}
]
} }
}, },
"required": [ "required": [

View File

@@ -13,7 +13,8 @@ from .local_testing_only_get_latest_logged_call_response_200_tags import (
from .report_json_body import ReportJsonBody from .report_json_body import ReportJsonBody
from .report_json_body_tags import ReportJsonBodyTags from .report_json_body_tags import ReportJsonBodyTags
from .report_response_200 import ReportResponse200 from .report_response_200 import ReportResponse200
from .report_response_200_status import ReportResponse200Status from .report_response_200_status_type_0 import ReportResponse200StatusType0
from .report_response_200_status_type_1 import ReportResponse200StatusType1
__all__ = ( __all__ = (
"CheckCacheJsonBody", "CheckCacheJsonBody",
@@ -25,5 +26,6 @@ __all__ = (
"ReportJsonBody", "ReportJsonBody",
"ReportJsonBodyTags", "ReportJsonBodyTags",
"ReportResponse200", "ReportResponse200",
"ReportResponse200Status", "ReportResponse200StatusType0",
"ReportResponse200StatusType1",
) )

View File

@@ -1,8 +1,9 @@
from typing import Any, Dict, Type, TypeVar from typing import Any, Dict, Type, TypeVar, Union
from attrs import define from attrs import define
from ..models.report_response_200_status import ReportResponse200Status from ..models.report_response_200_status_type_0 import ReportResponse200StatusType0
from ..models.report_response_200_status_type_1 import ReportResponse200StatusType1
T = TypeVar("T", bound="ReportResponse200") T = TypeVar("T", bound="ReportResponse200")
@@ -11,12 +12,18 @@ T = TypeVar("T", bound="ReportResponse200")
class ReportResponse200: class ReportResponse200:
""" """
Attributes: Attributes:
status (ReportResponse200Status): status (Union[ReportResponse200StatusType0, ReportResponse200StatusType1]):
""" """
status: ReportResponse200Status status: Union[ReportResponse200StatusType0, ReportResponse200StatusType1]
def to_dict(self) -> Dict[str, Any]: def to_dict(self) -> Dict[str, Any]:
status: str
if isinstance(self.status, ReportResponse200StatusType0):
status = self.status.value
else:
status = self.status.value status = self.status.value
field_dict: Dict[str, Any] = {} field_dict: Dict[str, Any] = {}
@@ -31,7 +38,23 @@ class ReportResponse200:
@classmethod @classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy() d = src_dict.copy()
status = ReportResponse200Status(d.pop("status"))
def _parse_status(data: object) -> Union[ReportResponse200StatusType0, ReportResponse200StatusType1]:
try:
if not isinstance(data, str):
raise TypeError()
status_type_0 = ReportResponse200StatusType0(data)
return status_type_0
except: # noqa: E722
pass
if not isinstance(data, str):
raise TypeError()
status_type_1 = ReportResponse200StatusType1(data)
return status_type_1
status = _parse_status(d.pop("status"))
report_response_200 = cls( report_response_200 = cls(
status=status, status=status,

View File

@@ -1,7 +1,7 @@
from enum import Enum from enum import Enum
class ReportResponse200Status(str, Enum): class ReportResponse200StatusType0(str, Enum):
OK = "ok" OK = "ok"
def __str__(self) -> str: def __str__(self) -> str:

View File

@@ -0,0 +1,8 @@
from enum import Enum
class ReportResponse200StatusType1(str, Enum):
ERROR = "error"
def __str__(self) -> str:
return str(self.value)

View File

@@ -24,10 +24,18 @@ def _get_tags(openpipe_options):
return ReportJsonBodyTags.from_dict(tags) return ReportJsonBodyTags.from_dict(tags)
def _should_check_cache(openpipe_options): def _should_check_cache(openpipe_options, req_payload):
if configured_client.token == "": if configured_client.token == "":
return False return False
return openpipe_options.get("cache", False)
cache_requested = openpipe_options.get("cache", False)
streaming = req_payload.get("stream", False)
if cache_requested and streaming:
print(
"Caching is not yet supported for streaming requests. Ignoring cache flag. Vote for this feature at https://github.com/OpenPipe/OpenPipe/issues/159"
)
return False
return cache_requested
def _process_cache_payload( def _process_cache_payload(
@@ -44,7 +52,7 @@ def maybe_check_cache(
openpipe_options={}, openpipe_options={},
req_payload={}, req_payload={},
): ):
if not _should_check_cache(openpipe_options): if not _should_check_cache(openpipe_options, req_payload):
return None return None
try: try:
payload = check_cache.sync( payload = check_cache.sync(
@@ -68,7 +76,7 @@ async def maybe_check_cache_async(
openpipe_options={}, openpipe_options={},
req_payload={}, req_payload={},
): ):
if not _should_check_cache(openpipe_options): if not _should_check_cache(openpipe_options, req_payload):
return None return None
try: try:

View File

@@ -13,15 +13,17 @@
"author": "", "author": "",
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": { "dependencies": {
"encoding": "^0.1.13",
"form-data": "^4.0.0", "form-data": "^4.0.0",
"lodash-es": "^4.17.21", "lodash-es": "^4.17.21",
"node-fetch": "^3.3.2", "node-fetch": "^2.6.12",
"openai-beta": "npm:openai@4.0.0-beta.7", "openai-beta": "npm:openai@4.0.0-beta.7",
"openai-legacy": "npm:openai@3.3.0" "openai-legacy": "npm:openai@3.3.0"
}, },
"devDependencies": { "devDependencies": {
"@types/lodash-es": "^4.17.8", "@types/lodash-es": "^4.17.8",
"@types/node": "^20.4.8", "@types/node": "^20.4.8",
"@types/node-fetch": "^2.6.4",
"dotenv": "^16.3.1", "dotenv": "^16.3.1",
"tsx": "^3.12.7", "tsx": "^3.12.7",
"typescript": "^5.0.4", "typescript": "^5.0.4",

View File

@@ -2,42 +2,38 @@
/* istanbul ignore file */ /* istanbul ignore file */
/* tslint:disable */ /* tslint:disable */
/* eslint-disable */ /* eslint-disable */
import FormData from "form-data"; import FormData from 'form-data';
import fetch, { Headers } from "node-fetch"; import fetch, { Headers } from 'node-fetch';
import type { RequestInit, Response } from "node-fetch"; import type { RequestInit, Response } from 'node-fetch';
import type { AbortSignal } from 'node-fetch/externals';
// @ts-expect-error TODO maybe I need an older node-fetch or something? import { ApiError } from './ApiError';
import type { AbortSignal } from "node-fetch/externals"; import type { ApiRequestOptions } from './ApiRequestOptions';
import type { ApiResult } from './ApiResult';
import { CancelablePromise } from './CancelablePromise';
import type { OnCancel } from './CancelablePromise';
import type { OpenAPIConfig } from './OpenAPI';
import { ApiError } from "./ApiError"; export const isDefined = <T>(value: T | null | undefined): value is Exclude<T, null | undefined> => {
import type { ApiRequestOptions } from "./ApiRequestOptions";
import type { ApiResult } from "./ApiResult";
import { CancelablePromise } from "./CancelablePromise";
import type { OnCancel } from "./CancelablePromise";
import type { OpenAPIConfig } from "./OpenAPI";
export const isDefined = <T>(
value: T | null | undefined
): value is Exclude<T, null | undefined> => {
return value !== undefined && value !== null; return value !== undefined && value !== null;
}; };
export const isString = (value: any): value is string => { export const isString = (value: any): value is string => {
return typeof value === "string"; return typeof value === 'string';
}; };
export const isStringWithValue = (value: any): value is string => { export const isStringWithValue = (value: any): value is string => {
return isString(value) && value !== ""; return isString(value) && value !== '';
}; };
export const isBlob = (value: any): value is Blob => { export const isBlob = (value: any): value is Blob => {
return ( return (
typeof value === "object" && typeof value === 'object' &&
typeof value.type === "string" && typeof value.type === 'string' &&
typeof value.stream === "function" && typeof value.stream === 'function' &&
typeof value.arrayBuffer === "function" && typeof value.arrayBuffer === 'function' &&
typeof value.constructor === "function" && typeof value.constructor === 'function' &&
typeof value.constructor.name === "string" && typeof value.constructor.name === 'string' &&
/^(Blob|File)$/.test(value.constructor.name) && /^(Blob|File)$/.test(value.constructor.name) &&
/^(Blob|File)$/.test(value[Symbol.toStringTag]) /^(Blob|File)$/.test(value[Symbol.toStringTag])
); );
@@ -52,7 +48,7 @@ export const base64 = (str: string): string => {
return btoa(str); return btoa(str);
} catch (err) { } catch (err) {
// @ts-ignore // @ts-ignore
return Buffer.from(str).toString("base64"); return Buffer.from(str).toString('base64');
} }
}; };
@@ -66,10 +62,10 @@ export const getQueryString = (params: Record<string, any>): string => {
const process = (key: string, value: any) => { const process = (key: string, value: any) => {
if (isDefined(value)) { if (isDefined(value)) {
if (Array.isArray(value)) { if (Array.isArray(value)) {
value.forEach((v) => { value.forEach(v => {
process(key, v); process(key, v);
}); });
} else if (typeof value === "object") { } else if (typeof value === 'object') {
Object.entries(value).forEach(([k, v]) => { Object.entries(value).forEach(([k, v]) => {
process(`${key}[${k}]`, v); process(`${key}[${k}]`, v);
}); });
@@ -84,17 +80,17 @@ export const getQueryString = (params: Record<string, any>): string => {
}); });
if (qs.length > 0) { if (qs.length > 0) {
return `?${qs.join("&")}`; return `?${qs.join('&')}`;
} }
return ""; return '';
}; };
const getUrl = (config: OpenAPIConfig, options: ApiRequestOptions): string => { const getUrl = (config: OpenAPIConfig, options: ApiRequestOptions): string => {
const encoder = config.ENCODE_PATH || encodeURI; const encoder = config.ENCODE_PATH || encodeURI;
const path = options.url const path = options.url
.replace("{api-version}", config.VERSION) .replace('{api-version}', config.VERSION)
.replace(/{(.*?)}/g, (substring: string, group: string) => { .replace(/{(.*?)}/g, (substring: string, group: string) => {
if (options.path?.hasOwnProperty(group)) { if (options.path?.hasOwnProperty(group)) {
return encoder(String(options.path[group])); return encoder(String(options.path[group]));
@@ -125,7 +121,7 @@ export const getFormData = (options: ApiRequestOptions): FormData | undefined =>
.filter(([_, value]) => isDefined(value)) .filter(([_, value]) => isDefined(value))
.forEach(([key, value]) => { .forEach(([key, value]) => {
if (Array.isArray(value)) { if (Array.isArray(value)) {
value.forEach((v) => process(key, v)); value.forEach(v => process(key, v));
} else { } else {
process(key, value); process(key, value);
} }
@@ -138,57 +134,48 @@ export const getFormData = (options: ApiRequestOptions): FormData | undefined =>
type Resolver<T> = (options: ApiRequestOptions) => Promise<T>; type Resolver<T> = (options: ApiRequestOptions) => Promise<T>;
export const resolve = async <T>( export const resolve = async <T>(options: ApiRequestOptions, resolver?: T | Resolver<T>): Promise<T | undefined> => {
options: ApiRequestOptions, if (typeof resolver === 'function') {
resolver?: T | Resolver<T>
): Promise<T | undefined> => {
if (typeof resolver === "function") {
return (resolver as Resolver<T>)(options); return (resolver as Resolver<T>)(options);
} }
return resolver; return resolver;
}; };
export const getHeaders = async ( export const getHeaders = async (config: OpenAPIConfig, options: ApiRequestOptions): Promise<Headers> => {
config: OpenAPIConfig,
options: ApiRequestOptions
): Promise<Headers> => {
const token = await resolve(options, config.TOKEN); const token = await resolve(options, config.TOKEN);
const username = await resolve(options, config.USERNAME); const username = await resolve(options, config.USERNAME);
const password = await resolve(options, config.PASSWORD); const password = await resolve(options, config.PASSWORD);
const additionalHeaders = await resolve(options, config.HEADERS); const additionalHeaders = await resolve(options, config.HEADERS);
const headers = Object.entries({ const headers = Object.entries({
Accept: "application/json", Accept: 'application/json',
...additionalHeaders, ...additionalHeaders,
...options.headers, ...options.headers,
}) })
.filter(([_, value]) => isDefined(value)) .filter(([_, value]) => isDefined(value))
.reduce( .reduce((headers, [key, value]) => ({
(headers, [key, value]) => ({
...headers, ...headers,
[key]: String(value), [key]: String(value),
}), }), {} as Record<string, string>);
{} as Record<string, string>
);
if (isStringWithValue(token)) { if (isStringWithValue(token)) {
headers["Authorization"] = `Bearer ${token}`; headers['Authorization'] = `Bearer ${token}`;
} }
if (isStringWithValue(username) && isStringWithValue(password)) { if (isStringWithValue(username) && isStringWithValue(password)) {
const credentials = base64(`${username}:${password}`); const credentials = base64(`${username}:${password}`);
headers["Authorization"] = `Basic ${credentials}`; headers['Authorization'] = `Basic ${credentials}`;
} }
if (options.body) { if (options.body) {
if (options.mediaType) { if (options.mediaType) {
headers["Content-Type"] = options.mediaType; headers['Content-Type'] = options.mediaType;
} else if (isBlob(options.body)) { } else if (isBlob(options.body)) {
headers["Content-Type"] = "application/octet-stream"; headers['Content-Type'] = 'application/octet-stream';
} else if (isString(options.body)) { } else if (isString(options.body)) {
headers["Content-Type"] = "text/plain"; headers['Content-Type'] = 'text/plain';
} else if (!isFormData(options.body)) { } else if (!isFormData(options.body)) {
headers["Content-Type"] = "application/json"; headers['Content-Type'] = 'application/json';
} }
} }
@@ -197,8 +184,8 @@ export const getHeaders = async (
export const getRequestBody = (options: ApiRequestOptions): any => { export const getRequestBody = (options: ApiRequestOptions): any => {
if (options.body !== undefined) { if (options.body !== undefined) {
if (options.mediaType?.includes("/json")) { if (options.mediaType?.includes('/json')) {
return JSON.stringify(options.body); return JSON.stringify(options.body)
} else if (isString(options.body) || isBlob(options.body) || isFormData(options.body)) { } else if (isString(options.body) || isBlob(options.body) || isFormData(options.body)) {
return options.body as any; return options.body as any;
} else { } else {
@@ -230,10 +217,7 @@ export const sendRequest = async (
return await fetch(url, request); return await fetch(url, request);
}; };
export const getResponseHeader = ( export const getResponseHeader = (response: Response, responseHeader?: string): string | undefined => {
response: Response,
responseHeader?: string
): string | undefined => {
if (responseHeader) { if (responseHeader) {
const content = response.headers.get(responseHeader); const content = response.headers.get(responseHeader);
if (isString(content)) { if (isString(content)) {
@@ -246,10 +230,10 @@ export const getResponseHeader = (
export const getResponseBody = async (response: Response): Promise<any> => { export const getResponseBody = async (response: Response): Promise<any> => {
if (response.status !== 204) { if (response.status !== 204) {
try { try {
const contentType = response.headers.get("Content-Type"); const contentType = response.headers.get('Content-Type');
if (contentType) { if (contentType) {
const jsonTypes = ["application/json", "application/problem+json"]; const jsonTypes = ['application/json', 'application/problem+json']
const isJSON = jsonTypes.some((type) => contentType.toLowerCase().startsWith(type)); const isJSON = jsonTypes.some(type => contentType.toLowerCase().startsWith(type));
if (isJSON) { if (isJSON) {
return await response.json(); return await response.json();
} else { } else {
@@ -265,15 +249,15 @@ export const getResponseBody = async (response: Response): Promise<any> => {
export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult): void => { export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult): void => {
const errors: Record<number, string> = { const errors: Record<number, string> = {
400: "Bad Request", 400: 'Bad Request',
401: "Unauthorized", 401: 'Unauthorized',
403: "Forbidden", 403: 'Forbidden',
404: "Not Found", 404: 'Not Found',
500: "Internal Server Error", 500: 'Internal Server Error',
502: "Bad Gateway", 502: 'Bad Gateway',
503: "Service Unavailable", 503: 'Service Unavailable',
...options.errors, ...options.errors,
}; }
const error = errors[result.status]; const error = errors[result.status];
if (error) { if (error) {
@@ -281,8 +265,8 @@ export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult):
} }
if (!result.ok) { if (!result.ok) {
const errorStatus = result.status ?? "unknown"; const errorStatus = result.status ?? 'unknown';
const errorStatusText = result.statusText ?? "unknown"; const errorStatusText = result.statusText ?? 'unknown';
const errorBody = (() => { const errorBody = (() => {
try { try {
return JSON.stringify(result.body, null, 2); return JSON.stringify(result.body, null, 2);
@@ -291,9 +275,7 @@ export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult):
} }
})(); })();
throw new ApiError( throw new ApiError(options, result,
options,
result,
`Generic Error: status: ${errorStatus}; status text: ${errorStatusText}; body: ${errorBody}` `Generic Error: status: ${errorStatus}; status text: ${errorStatusText}; body: ${errorBody}`
); );
} }
@@ -306,10 +288,7 @@ export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult):
* @returns CancelablePromise<T> * @returns CancelablePromise<T>
* @throws ApiError * @throws ApiError
*/ */
export const request = <T>( export const request = <T>(config: OpenAPIConfig, options: ApiRequestOptions): CancelablePromise<T> => {
config: OpenAPIConfig,
options: ApiRequestOptions
): CancelablePromise<T> => {
return new CancelablePromise(async (resolve, reject, onCancel) => { return new CancelablePromise(async (resolve, reject, onCancel) => {
try { try {
const url = getUrl(config, options); const url = getUrl(config, options);

View File

@@ -82,7 +82,7 @@ export class DefaultService {
tags?: Record<string, string>; tags?: Record<string, string>;
}, },
): CancelablePromise<{ ): CancelablePromise<{
status: 'ok'; status: ('ok' | 'error');
}> { }> {
return this.httpRequest.request({ return this.httpRequest.request({
method: 'POST', method: 'POST',

View File

@@ -2,10 +2,13 @@ import dotenv from "dotenv";
import { expect, test } from "vitest"; import { expect, test } from "vitest";
import OpenAI from "."; import OpenAI from ".";
import { import {
ChatCompletion,
CompletionCreateParams, CompletionCreateParams,
CreateChatCompletionRequestMessage, CreateChatCompletionRequestMessage,
} from "openai-beta/resources/chat/completions"; } from "openai-beta/resources/chat/completions";
import { OPClient } from "../codegen"; import { OPClient } from "../codegen";
import mergeChunks from "./mergeChunks";
import assert from "assert";
dotenv.config({ path: "../.env" }); dotenv.config({ path: "../.env" });
@@ -31,9 +34,7 @@ test("basic call", async () => {
}; };
const completion = await oaiClient.chat.completions.create({ const completion = await oaiClient.chat.completions.create({
...payload, ...payload,
openpipe: { openpipe: { tags: { promptId: "test" } },
tags: { promptId: "test" },
},
}); });
await completion.openpipe.reportingFinished; await completion.openpipe.reportingFinished;
const lastLogged = await lastLoggedCall(); const lastLogged = await lastLoggedCall();
@@ -46,29 +47,32 @@ const randomString = (length: number) => {
const characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; const characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
return Array.from( return Array.from(
{ length }, { length },
() => characters[Math.floor(Math.random() * characters.length)] () => characters[Math.floor(Math.random() * characters.length)],
).join(""); ).join("");
}; };
test.skip("streaming", async () => { test("streaming", async () => {
const completion = await oaiClient.chat.completions.create({ const completion = await oaiClient.chat.completions.create({
model: "gpt-3.5-turbo", model: "gpt-3.5-turbo",
messages: [{ role: "system", content: "count to 4" }], messages: [{ role: "system", content: "count to 3" }],
stream: true, stream: true,
}); });
let merged = null; let merged: ChatCompletion | null = null;
for await (const chunk of completion) { for await (const chunk of completion) {
merged = merge_openai_chunks(merged, chunk); merged = mergeChunks(merged, chunk);
} }
const lastLogged = await lastLoggedCall(); const lastLogged = await lastLoggedCall();
expect(lastLogged?.modelResponse?.respPayload.choices[0].message.content).toBe( await completion.openpipe.reportingFinished;
merged.choices[0].message.content
); expect(merged).toMatchObject(lastLogged?.modelResponse?.respPayload);
expect(lastLogged?.modelResponse?.reqPayload.messages).toMatchObject([
{ role: "system", content: "count to 3" },
]);
}); });
test.skip("bad call streaming", async () => { test("bad call streaming", async () => {
try { try {
await oaiClient.chat.completions.create({ await oaiClient.chat.completions.create({
model: "gpt-3.5-turbo-blaster", model: "gpt-3.5-turbo-blaster",
@@ -76,26 +80,29 @@ test.skip("bad call streaming", async () => {
stream: true, stream: true,
}); });
} catch (e) { } catch (e) {
await e.openpipe.reportingFinished;
const lastLogged = await lastLoggedCall(); const lastLogged = await lastLoggedCall();
expect(lastLogged?.modelResponse?.errorMessage).toBe( expect(lastLogged?.modelResponse?.errorMessage).toEqual(
"The model `gpt-3.5-turbo-blaster` does not exist" "The model `gpt-3.5-turbo-blaster` does not exist",
); );
expect(lastLogged?.modelResponse?.statusCode).toBe(404); expect(lastLogged?.modelResponse?.statusCode).toEqual(404);
} }
}); });
test("bad call", async () => { test("bad call", async () => {
try { try {
await oaiClient.chat.completions.create({ await oaiClient.chat.completions.create({
model: "gpt-3.5-turbo-booster", model: "gpt-3.5-turbo-buster",
messages: [{ role: "system", content: "count to 10" }], messages: [{ role: "system", content: "count to 10" }],
}); });
} catch (e) { } catch (e) {
assert("openpipe" in e);
await e.openpipe.reportingFinished;
const lastLogged = await lastLoggedCall(); const lastLogged = await lastLoggedCall();
expect(lastLogged?.modelResponse?.errorMessage).toBe( expect(lastLogged?.modelResponse?.errorMessage).toEqual(
"The model `gpt-3.5-turbo-booster` does not exist" "The model `gpt-3.5-turbo-buster` does not exist",
); );
expect(lastLogged?.modelResponse?.statusCode).toBe(404); expect(lastLogged?.modelResponse?.statusCode).toEqual(404);
} }
}); });
@@ -109,12 +116,12 @@ test("caching", async () => {
messages: [message], messages: [message],
openpipe: { cache: true }, openpipe: { cache: true },
}); });
expect(completion.openpipe.cacheStatus).toBe("MISS"); expect(completion.openpipe.cacheStatus).toEqual("MISS");
await completion.openpipe.reportingFinished; await completion.openpipe.reportingFinished;
const firstLogged = await lastLoggedCall(); const firstLogged = await lastLoggedCall();
expect(completion.choices[0].message.content).toBe( expect(completion.choices[0].message.content).toEqual(
firstLogged?.modelResponse?.respPayload.choices[0].message.content firstLogged?.modelResponse?.respPayload.choices[0].message.content,
); );
const completion2 = await oaiClient.chat.completions.create({ const completion2 = await oaiClient.chat.completions.create({
@@ -122,5 +129,5 @@ test("caching", async () => {
messages: [message], messages: [message],
openpipe: { cache: true }, openpipe: { cache: true },
}); });
expect(completion2.openpipe.cacheStatus).toBe("HIT"); expect(completion2.openpipe.cacheStatus).toEqual("HIT");
}); });

View File

@@ -5,9 +5,9 @@ import {
ChatCompletion, ChatCompletion,
ChatCompletionChunk, ChatCompletionChunk,
CompletionCreateParams, CompletionCreateParams,
Completions,
} from "openai-beta/resources/chat/completions"; } from "openai-beta/resources/chat/completions";
import { WrappedStream } from "./streaming";
import { DefaultService, OPClient } from "../codegen"; import { DefaultService, OPClient } from "../codegen";
import { Stream } from "openai-beta/streaming"; import { Stream } from "openai-beta/streaming";
import { OpenPipeArgs, OpenPipeMeta, type OpenPipeConfig, getTags } from "../shared"; import { OpenPipeArgs, OpenPipeMeta, type OpenPipeConfig, getTags } from "../shared";
@@ -27,11 +27,11 @@ export default class OpenAI extends openai.OpenAI {
BASE: BASE:
openpipe?.baseUrl ?? readEnv("OPENPIPE_BASE_URL") ?? "https://app.openpipe.ai/api/v1", openpipe?.baseUrl ?? readEnv("OPENPIPE_BASE_URL") ?? "https://app.openpipe.ai/api/v1",
TOKEN: openPipeApiKey, TOKEN: openPipeApiKey,
}) }),
); );
} else { } else {
console.warn( console.warn(
"You're using the OpenPipe client without an API key. No completion requests will be logged." "You're using the OpenPipe client without an API key. No completion requests will be logged.",
); );
} }
} }
@@ -43,10 +43,10 @@ class WrappedChat extends openai.OpenAI.Chat {
this.completions.opClient = client; this.completions.opClient = client;
} }
completions: InstrumentedCompletions = new InstrumentedCompletions(this.client); completions: WrappedCompletions = new WrappedCompletions(this.client);
} }
class InstrumentedCompletions extends openai.OpenAI.Chat.Completions { class WrappedCompletions extends openai.OpenAI.Chat.Completions {
opClient?: OPClient; opClient?: OPClient;
constructor(client: openai.OpenAI, opClient?: OPClient) { constructor(client: openai.OpenAI, opClient?: OPClient) {
@@ -54,32 +54,35 @@ class InstrumentedCompletions extends openai.OpenAI.Chat.Completions {
this.opClient = opClient; this.opClient = opClient;
} }
_report(args: Parameters<DefaultService["report"]>[0]) { async _report(args: Parameters<DefaultService["report"]>[0]) {
try { try {
return this.opClient ? this.opClient.default.report(args) : Promise.resolve(); this.opClient ? await this.opClient.default.report(args) : Promise.resolve();
} catch (e) { } catch (e) {
console.error(e); console.error(e);
return Promise.resolve();
} }
} }
create( create(
body: CompletionCreateParams.CreateChatCompletionRequestNonStreaming & OpenPipeArgs, body: CompletionCreateParams.CreateChatCompletionRequestNonStreaming & OpenPipeArgs,
options?: Core.RequestOptions options?: Core.RequestOptions,
): Promise<Core.APIResponse<ChatCompletion & { openpipe: OpenPipeMeta }>>; ): Promise<Core.APIResponse<ChatCompletion & { openpipe: OpenPipeMeta }>>;
create( create(
body: CompletionCreateParams.CreateChatCompletionRequestStreaming & OpenPipeArgs, body: CompletionCreateParams.CreateChatCompletionRequestStreaming & OpenPipeArgs,
options?: Core.RequestOptions options?: Core.RequestOptions,
): Promise<Core.APIResponse<Stream<ChatCompletionChunk>>>; ): Promise<Core.APIResponse<WrappedStream>>;
async create( async create(
{ openpipe, ...body }: CompletionCreateParams & OpenPipeArgs, { openpipe, ...body }: CompletionCreateParams & OpenPipeArgs,
options?: Core.RequestOptions options?: Core.RequestOptions,
): Promise< ): Promise<Core.APIResponse<(ChatCompletion & { openpipe: OpenPipeMeta }) | WrappedStream>> {
Core.APIResponse<(ChatCompletion & { openpipe: OpenPipeMeta }) | Stream<ChatCompletionChunk>>
> {
console.log("LALALA REPORT", this.opClient);
const requestedAt = Date.now(); const requestedAt = Date.now();
const cacheRequested = openpipe?.cache ?? false; let reportingFinished: OpenPipeMeta["reportingFinished"] = Promise.resolve();
let cacheRequested = openpipe?.cache ?? false;
if (cacheRequested && body.stream) {
console.warn(
`Caching is not yet supported for streaming requests. Ignoring cache flag. Vote for this feature at https://github.com/OpenPipe/OpenPipe/issues/159`,
);
cacheRequested = false;
}
if (cacheRequested) { if (cacheRequested) {
try { try {
@@ -92,12 +95,13 @@ class InstrumentedCompletions extends openai.OpenAI.Chat.Completions {
.then((res) => res.respPayload); .then((res) => res.respPayload);
if (cached) { if (cached) {
const meta = {
cacheStatus: "HIT",
reportingFinished,
};
return { return {
...cached, ...cached,
openpipe: { openpipe: meta,
cacheStatus: "HIT",
reportingFinished: Promise.resolve(),
},
}; };
} }
} catch (e) { } catch (e) {
@@ -105,15 +109,23 @@ class InstrumentedCompletions extends openai.OpenAI.Chat.Completions {
} }
} }
let reportingFinished: OpenPipeMeta["reportingFinished"] = Promise.resolve();
try { try {
if (body.stream) { if (body.stream) {
const stream = await super.create(body, options); const stream = await super.create(body, options);
const wrappedStream = new WrappedStream(stream, (response) =>
this._report({
requestedAt,
receivedAt: Date.now(),
reqPayload: body,
respPayload: response,
statusCode: 200,
tags: getTags(openpipe),
}),
);
// Do some logging of each chunk here // Do some logging of each chunk here
return stream; return wrappedStream;
} else { } else {
const response = await super.create(body, options); const response = await super.create(body, options);
@@ -147,6 +159,16 @@ class InstrumentedCompletions extends openai.OpenAI.Chat.Completions {
tags: getTags(openpipe), tags: getTags(openpipe),
}); });
} }
// make sure error is an object we can add properties to
if (typeof error === "object" && error !== null) {
error = {
...error,
openpipe: {
cacheStatus: cacheRequested ? "MISS" : "SKIP",
reportingFinished,
},
};
}
throw error; throw error;
} }

View File

@@ -0,0 +1,43 @@
import { ChatCompletion, ChatCompletionChunk } from "openai-beta/resources/chat";
import { Stream } from "openai-beta/streaming";
import { OpenPipeMeta } from "../shared";
import mergeChunks from "./mergeChunks";
export class WrappedStream extends Stream<ChatCompletionChunk> {
openpipe: OpenPipeMeta;
private resolveReportingFinished: () => void = () => {};
private report: (response: unknown) => Promise<void>;
constructor(stream: Stream<ChatCompletionChunk>, report: (response: unknown) => Promise<void>) {
super(stream.response, stream.controller);
this.report = report;
const reportingFinished = new Promise<void>((resolve) => {
this.resolveReportingFinished = resolve;
});
this.openpipe = {
cacheStatus: "MISS",
reportingFinished,
};
}
async *[Symbol.asyncIterator](): AsyncIterator<ChatCompletionChunk, any, undefined> {
const iterator = super[Symbol.asyncIterator]();
let combinedResponse: ChatCompletion | null = null;
while (true) {
const result = await iterator.next();
if (result.done) break;
combinedResponse = mergeChunks(combinedResponse, result.value);
yield result.value;
}
await this.report(combinedResponse);
// Resolve the promise here
this.resolveReportingFinished();
}
}

View File

@@ -1,4 +1,5 @@
import pkg from "../package.json"; import pkg from "../package.json";
import { DefaultService } from "./codegen";
export type OpenPipeConfig = { export type OpenPipeConfig = {
apiKey?: string; apiKey?: string;
@@ -15,9 +16,11 @@ export type OpenPipeMeta = {
// We report your call to OpenPipe asynchronously in the background. If you // We report your call to OpenPipe asynchronously in the background. If you
// need to wait until the report is sent to take further action, you can await // need to wait until the report is sent to take further action, you can await
// this promise. // this promise.
reportingFinished: Promise<void | { status: "ok" }>; reportingFinished: Promise<void>;
}; };
export type ReportFn = (...args: Parameters<DefaultService["report"]>) => Promise<void>;
export const getTags = (args: OpenPipeArgs["openpipe"]): Record<string, string> => ({ export const getTags = (args: OpenPipeArgs["openpipe"]): Record<string, string> => ({
...args?.tags, ...args?.tags,
...(args?.cache ? { $cache: args.cache?.toString() } : {}), ...(args?.cache ? { $cache: args.cache?.toString() } : {}),

105
pnpm-lock.yaml generated
View File

@@ -134,6 +134,9 @@ importers:
kysely: kysely:
specifier: ^0.26.1 specifier: ^0.26.1
version: 0.26.1 version: 0.26.1
kysely-codegen:
specifier: ^0.10.1
version: 0.10.1(kysely@0.26.1)(pg@8.11.2)
lodash-es: lodash-es:
specifier: ^4.17.21 specifier: ^4.17.21
version: 4.17.21 version: 4.17.21
@@ -163,7 +166,7 @@ importers:
version: 6.9.4 version: 6.9.4
openai: openai:
specifier: 4.0.0-beta.7 specifier: 4.0.0-beta.7
version: 4.0.0-beta.7 version: 4.0.0-beta.7(encoding@0.1.13)
openpipe: openpipe:
specifier: workspace:* specifier: workspace:*
version: link:../client-libs/typescript version: link:../client-libs/typescript
@@ -354,6 +357,9 @@ importers:
client-libs/typescript: client-libs/typescript:
dependencies: dependencies:
encoding:
specifier: ^0.1.13
version: 0.1.13
form-data: form-data:
specifier: ^4.0.0 specifier: ^4.0.0
version: 4.0.0 version: 4.0.0
@@ -361,11 +367,11 @@ importers:
specifier: ^4.17.21 specifier: ^4.17.21
version: 4.17.21 version: 4.17.21
node-fetch: node-fetch:
specifier: ^3.3.2 specifier: ^2.6.12
version: 3.3.2 version: 2.6.12(encoding@0.1.13)
openai-beta: openai-beta:
specifier: npm:openai@4.0.0-beta.7 specifier: npm:openai@4.0.0-beta.7
version: /openai@4.0.0-beta.7 version: /openai@4.0.0-beta.7(encoding@0.1.13)
openai-legacy: openai-legacy:
specifier: npm:openai@3.3.0 specifier: npm:openai@3.3.0
version: /openai@3.3.0 version: /openai@3.3.0
@@ -376,6 +382,9 @@ importers:
'@types/node': '@types/node':
specifier: ^20.4.8 specifier: ^20.4.8
version: 20.4.8 version: 20.4.8
'@types/node-fetch':
specifier: ^2.6.4
version: 2.6.4
dotenv: dotenv:
specifier: ^16.3.1 specifier: ^16.3.1
version: 16.3.1 version: 16.3.1
@@ -413,7 +422,7 @@ packages:
digest-fetch: 1.3.0 digest-fetch: 1.3.0
form-data-encoder: 1.7.2 form-data-encoder: 1.7.2
formdata-node: 4.4.1 formdata-node: 4.4.1
node-fetch: 2.6.12 node-fetch: 2.6.12(encoding@0.1.13)
transitivePeerDependencies: transitivePeerDependencies:
- encoding - encoding
dev: false dev: false
@@ -2687,7 +2696,7 @@ packages:
dependencies: dependencies:
https-proxy-agent: 5.0.1 https-proxy-agent: 5.0.1
mkdirp: 0.5.6 mkdirp: 0.5.6
node-fetch: 2.6.12 node-fetch: 2.6.12(encoding@0.1.13)
progress: 2.0.3 progress: 2.0.3
proxy-from-env: 1.1.0 proxy-from-env: 1.1.0
which: 2.0.2 which: 2.0.2
@@ -3177,7 +3186,6 @@ packages:
dependencies: dependencies:
'@types/node': 20.4.10 '@types/node': 20.4.10
form-data: 3.0.1 form-data: 3.0.1
dev: false
/@types/node@18.16.0: /@types/node@18.16.0:
resolution: {integrity: sha512-BsAaKhB+7X+H4GnSjGhJG9Qi8Tw+inU9nJDwmD5CgOmBLEI6ArdhikpLX7DjbjDRDTbqZzU2LSQNZg8WGPiSZQ==} resolution: {integrity: sha512-BsAaKhB+7X+H4GnSjGhJG9Qi8Tw+inU9nJDwmD5CgOmBLEI6ArdhikpLX7DjbjDRDTbqZzU2LSQNZg8WGPiSZQ==}
@@ -3828,7 +3836,6 @@ packages:
/asynckit@0.4.0: /asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
dev: false
/available-typed-arrays@1.0.5: /available-typed-arrays@1.0.5:
resolution: {integrity: sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==} resolution: {integrity: sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==}
@@ -4219,7 +4226,6 @@ packages:
engines: {node: '>= 0.8'} engines: {node: '>= 0.8'}
dependencies: dependencies:
delayed-stream: 1.0.0 delayed-stream: 1.0.0
dev: false
/comma-separated-tokens@1.0.8: /comma-separated-tokens@1.0.8:
resolution: {integrity: sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==} resolution: {integrity: sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==}
@@ -4504,11 +4510,6 @@ packages:
assert-plus: 1.0.0 assert-plus: 1.0.0
dev: false dev: false
/data-uri-to-buffer@4.0.1:
resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==}
engines: {node: '>= 12'}
dev: false
/date-fns@2.30.0: /date-fns@2.30.0:
resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==} resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==}
engines: {node: '>=0.11'} engines: {node: '>=0.11'}
@@ -4592,7 +4593,6 @@ packages:
/delayed-stream@1.0.0: /delayed-stream@1.0.0:
resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
engines: {node: '>=0.4.0'} engines: {node: '>=0.4.0'}
dev: false
/depd@1.1.2: /depd@1.1.2:
resolution: {integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==} resolution: {integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==}
@@ -4726,6 +4726,12 @@ packages:
engines: {node: '>= 0.8'} engines: {node: '>= 0.8'}
dev: false dev: false
/encoding@0.1.13:
resolution: {integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==}
dependencies:
iconv-lite: 0.6.3
dev: false
/engine.io-client@6.5.2: /engine.io-client@6.5.2:
resolution: {integrity: sha512-CQZqbrpEYnrpGqC07a9dJDz4gePZUgTPMU3NKJPSeQOyw27Tst4Pl3FemKoFGAlHzgZmKjoRmiJvbWfhCXUlIg==} resolution: {integrity: sha512-CQZqbrpEYnrpGqC07a9dJDz4gePZUgTPMU3NKJPSeQOyw27Tst4Pl3FemKoFGAlHzgZmKjoRmiJvbWfhCXUlIg==}
dependencies: dependencies:
@@ -5396,14 +5402,6 @@ packages:
format: 0.2.2 format: 0.2.2
dev: false dev: false
/fetch-blob@3.2.0:
resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==}
engines: {node: ^12.20 || >= 14.13}
dependencies:
node-domexception: 1.0.0
web-streams-polyfill: 3.2.1
dev: false
/fflate@0.4.8: /fflate@0.4.8:
resolution: {integrity: sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==} resolution: {integrity: sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==}
dev: false dev: false
@@ -5519,7 +5517,6 @@ packages:
asynckit: 0.4.0 asynckit: 0.4.0
combined-stream: 1.0.8 combined-stream: 1.0.8
mime-types: 2.1.35 mime-types: 2.1.35
dev: false
/form-data@4.0.0: /form-data@4.0.0:
resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==}
@@ -5543,13 +5540,6 @@ packages:
web-streams-polyfill: 4.0.0-beta.3 web-streams-polyfill: 4.0.0-beta.3
dev: false dev: false
/formdata-polyfill@4.0.10:
resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==}
engines: {node: '>=12.20.0'}
dependencies:
fetch-blob: 3.2.0
dev: false
/forwarded@0.2.0: /forwarded@0.2.0:
resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==}
engines: {node: '>= 0.6'} engines: {node: '>= 0.6'}
@@ -5965,6 +5955,13 @@ packages:
safer-buffer: 2.1.2 safer-buffer: 2.1.2
dev: false dev: false
/iconv-lite@0.6.3:
resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
engines: {node: '>=0.10.0'}
dependencies:
safer-buffer: 2.1.2
dev: false
/ignore@5.2.4: /ignore@5.2.4:
resolution: {integrity: sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==} resolution: {integrity: sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==}
engines: {node: '>= 4'} engines: {node: '>= 4'}
@@ -6391,6 +6388,30 @@ packages:
object.values: 1.1.6 object.values: 1.1.6
dev: true dev: true
/kysely-codegen@0.10.1(kysely@0.26.1)(pg@8.11.2):
resolution: {integrity: sha512-8Bslh952gN5gtucRv4jTZDFD18RBioS6M50zHfe5kwb5iSyEAunU4ZYMdHzkHraa4zxjg5/183XlOryBCXLRIw==}
hasBin: true
peerDependencies:
better-sqlite3: '>=7.6.2'
kysely: '>=0.19.12'
mysql2: ^2.3.3 || ^3.0.0
pg: ^8.8.0
peerDependenciesMeta:
better-sqlite3:
optional: true
mysql2:
optional: true
pg:
optional: true
dependencies:
chalk: 4.1.2
dotenv: 16.3.1
kysely: 0.26.1
micromatch: 4.0.5
minimist: 1.2.8
pg: 8.11.2
dev: false
/kysely@0.26.1: /kysely@0.26.1:
resolution: {integrity: sha512-FVRomkdZofBu3O8SiwAOXrwbhPZZr8mBN5ZeUWyprH29jzvy6Inzqbd0IMmGxpd4rcOCL9HyyBNWBa8FBqDAdg==} resolution: {integrity: sha512-FVRomkdZofBu3O8SiwAOXrwbhPZZr8mBN5ZeUWyprH29jzvy6Inzqbd0IMmGxpd4rcOCL9HyyBNWBa8FBqDAdg==}
engines: {node: '>=14.0.0'} engines: {node: '>=14.0.0'}
@@ -6611,7 +6632,6 @@ packages:
dependencies: dependencies:
braces: 3.0.2 braces: 3.0.2
picomatch: 2.3.1 picomatch: 2.3.1
dev: true
/mime-db@1.52.0: /mime-db@1.52.0:
resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
@@ -6833,7 +6853,7 @@ packages:
engines: {node: '>=10.5.0'} engines: {node: '>=10.5.0'}
dev: false dev: false
/node-fetch@2.6.12: /node-fetch@2.6.12(encoding@0.1.13):
resolution: {integrity: sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==} resolution: {integrity: sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==}
engines: {node: 4.x || >=6.0.0} engines: {node: 4.x || >=6.0.0}
peerDependencies: peerDependencies:
@@ -6842,18 +6862,10 @@ packages:
encoding: encoding:
optional: true optional: true
dependencies: dependencies:
encoding: 0.1.13
whatwg-url: 5.0.0 whatwg-url: 5.0.0
dev: false dev: false
/node-fetch@3.3.2:
resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==}
engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
dependencies:
data-uri-to-buffer: 4.0.1
fetch-blob: 3.2.0
formdata-polyfill: 4.0.10
dev: false
/node-mocks-http@1.12.2: /node-mocks-http@1.12.2:
resolution: {integrity: sha512-xhWwC0dh35R9rf0j3bRZXuISXdHxxtMx0ywZQBwjrg3yl7KpRETzogfeCamUIjltpn0Fxvs/ZhGJul1vPLrdJQ==} resolution: {integrity: sha512-xhWwC0dh35R9rf0j3bRZXuISXdHxxtMx0ywZQBwjrg3yl7KpRETzogfeCamUIjltpn0Fxvs/ZhGJul1vPLrdJQ==}
engines: {node: '>=0.6'} engines: {node: '>=0.6'}
@@ -7001,7 +7013,7 @@ packages:
- debug - debug
dev: false dev: false
/openai@4.0.0-beta.7: /openai@4.0.0-beta.7(encoding@0.1.13):
resolution: {integrity: sha512-jHjwvpMuGkNxiQ3erwLZsOvPEhcVrMtwtfNeYmGCjhbdB+oStVw/7pIhIPkualu8rlhLwgMR7awknIaN3IQcOA==} resolution: {integrity: sha512-jHjwvpMuGkNxiQ3erwLZsOvPEhcVrMtwtfNeYmGCjhbdB+oStVw/7pIhIPkualu8rlhLwgMR7awknIaN3IQcOA==}
dependencies: dependencies:
'@types/node': 18.16.0 '@types/node': 18.16.0
@@ -7011,7 +7023,7 @@ packages:
digest-fetch: 1.3.0 digest-fetch: 1.3.0
form-data-encoder: 1.7.2 form-data-encoder: 1.7.2
formdata-node: 4.4.1 formdata-node: 4.4.1
node-fetch: 2.6.12 node-fetch: 2.6.12(encoding@0.1.13)
transitivePeerDependencies: transitivePeerDependencies:
- encoding - encoding
dev: false dev: false
@@ -9111,11 +9123,6 @@ packages:
glob-to-regexp: 0.4.1 glob-to-regexp: 0.4.1
graceful-fs: 4.2.11 graceful-fs: 4.2.11
/web-streams-polyfill@3.2.1:
resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==}
engines: {node: '>= 8'}
dev: false
/web-streams-polyfill@4.0.0-beta.3: /web-streams-polyfill@4.0.0-beta.3:
resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==} resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==}
engines: {node: '>= 14'} engines: {node: '>= 14'}

View File

@@ -7,7 +7,7 @@ databases:
services: services:
- type: web - type: web
name: querykey-prod-web name: querykey-prod-web
env: docker runtime: docker
dockerfilePath: ./app/Dockerfile dockerfilePath: ./app/Dockerfile
dockerContext: . dockerContext: .
plan: standard plan: standard
@@ -21,8 +21,6 @@ services:
name: querykey-prod name: querykey-prod
property: connectionString property: connectionString
- fromGroup: querykey-prod - fromGroup: querykey-prod
- key: NEXT_PUBLIC_SOCKET_URL
value: https://querykey-prod-wss.onrender.com
# Render support says we need to manually set this because otherwise # Render support says we need to manually set this because otherwise
# sometimes it checks a different random port that NextJS opens for # sometimes it checks a different random port that NextJS opens for
# liveness and the liveness check fails. # liveness and the liveness check fails.
@@ -31,8 +29,16 @@ services:
- type: web - type: web
name: querykey-prod-wss name: querykey-prod-wss
env: docker runtime: docker
dockerfilePath: ./app/Dockerfile dockerfilePath: ./app/Dockerfile
dockerContext: . dockerContext: .
plan: free plan: free
dockerCommand: pnpm tsx src/wss-server.ts dockerCommand: pnpm tsx src/wss-server.ts
- type: worker
name: querykey-prod-worker
runtime: docker
dockerfilePath: ./app/Dockerfile
dockerContext: .
plan: starter
dockerCommand: pnpm tsx src/server/tasks/worker.ts