Compare commits

..

14 Commits

Author SHA1 Message Date
Kyle Corbitt
f2135ddc72 Streaming + logging works in TypeScript SDK
Also added some high-level tests to minimize the chances that we're breaking anything.

The TypeScript SDK is mostly functional at this point, though we don't yet have a build process or a way to import it when deployed as an NPM package.
2023-08-18 08:53:08 -07:00
arcticfly
ca89eafb0b Create new uiId for forked variants and scenarios (#175)
* Create new uiIds for forked variants and scenarios

* Add replaceVariant.mutateAsync to onSave dependencies
2023-08-18 08:09:07 -07:00
arcticfly
b50d47beaf Square header border when scrolled down (#174)
* Square header border when scrolled down

* Remove unused import
2023-08-18 01:41:47 -07:00
arcticfly
733d53625b Add Gryphe/MythoMax-L2-13b (#173) 2023-08-18 00:37:16 -07:00
arcticfly
a5e59e4235 Allow user to delete scenario without variables (#172)
* Allow user to delete scenario without variables

* Hide expand button for empty scenario editor

* Add header to scenario modal
2023-08-18 00:08:32 -07:00
Kyle Corbitt
d0102e3202 Merge pull request #171 from OpenPipe/experiment-slug
Use shorter experiment IDs
2023-08-17 23:33:30 -07:00
Kyle Corbitt
bd571c4c4e Merge pull request #170 from OpenPipe/jobs-log
Enqueue tasks more efficiently
2023-08-17 23:33:20 -07:00
Kyle Corbitt
296eb23d97 Use shorter experiment IDs
Because https://app.openpipe.ai/experiments/B1EtN6oHeXMele2 is a cooler URL than https://app.openpipe.ai/experiments/3692942c-6f1b-4bef-83b1-c11f00a3fbdd
2023-08-17 23:28:56 -07:00
Kyle Corbitt
4e2ae7a441 Enqueue tasks more efficiently
Previously we were opening a new database connection for each task we added. Not a problem at small scale but kinda overwhelming for Postgres now that we have more usage.
2023-08-17 22:42:46 -07:00
Kyle Corbitt
072dcee376 Merge pull request #168 from OpenPipe/jobs-log
Admin dashboard for jobs
2023-08-17 22:26:10 -07:00
Kyle Corbitt
94464c0617 Admin dashboard for jobs
Extremely simple jobs dashboard to sanity-check what we've got going on in the job queue.
2023-08-17 22:20:39 -07:00
arcticfly
980644f13c Support vicuna system message (#167)
* Support vicuna system message

* Change tags to USER and ASSISTANT
2023-08-17 21:02:27 -07:00
arcticfly
6a56250001 Add platypus 13b, vicuna 13b, and nous hermes 7b (#166)
* Add platypus

* Add vicuna 13b and nous hermes 7b
2023-08-17 20:01:10 -07:00
Kyle Corbitt
b1c7bbbd4a Merge pull request #165 from OpenPipe/better-output
Don't define CellWrapper inline
2023-08-17 19:07:32 -07:00
52 changed files with 1374 additions and 608 deletions

View File

@@ -12,6 +12,7 @@ declare module "nextjs-routes" {
export type Route =
| StaticRoute<"/account/signin">
| StaticRoute<"/admin/jobs">
| DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
| StaticRoute<"/api/experiments/og-image">
| DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>
@@ -20,7 +21,7 @@ declare module "nextjs-routes" {
| StaticRoute<"/dashboard">
| DynamicRoute<"/data/[id]", { "id": string }>
| StaticRoute<"/data">
| DynamicRoute<"/experiments/[id]", { "id": string }>
| DynamicRoute<"/experiments/[experimentSlug]", { "experimentSlug": string }>
| StaticRoute<"/experiments">
| StaticRoute<"/">
| DynamicRoute<"/invitations/[invitationToken]", { "invitationToken": string }>

View File

@@ -18,6 +18,7 @@
"lint": "next lint",
"start": "TZ=UTC next start",
"codegen:clients": "tsx src/server/scripts/client-codegen.ts",
"codegen:db": "prisma generate && kysely-codegen --dialect postgres --out-file src/server/db.types.ts",
"seed": "tsx prisma/seed.ts",
"check": "concurrently 'pnpm lint' 'pnpm tsc' 'pnpm prettier . --check'",
"test": "pnpm vitest"
@@ -65,6 +66,7 @@
"json-stringify-pretty-compact": "^4.0.0",
"jsonschema": "^1.4.1",
"kysely": "^0.26.1",
"kysely-codegen": "^0.10.1",
"lodash-es": "^4.17.21",
"lucide-react": "^0.265.0",
"marked": "^7.0.3",

View File

@@ -0,0 +1,88 @@
/*
* Copyright 2023 Viascom Ltd liab. Co
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION nanoid(
size int DEFAULT 21,
alphabet text DEFAULT '_-0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
)
RETURNS text
LANGUAGE plpgsql
volatile
AS
$$
DECLARE
idBuilder text := '';
counter int := 0;
bytes bytea;
alphabetIndex int;
alphabetArray text[];
alphabetLength int;
mask int;
step int;
BEGIN
alphabetArray := regexp_split_to_array(alphabet, '');
alphabetLength := array_length(alphabetArray, 1);
mask := (2 << cast(floor(log(alphabetLength - 1) / log(2)) as int)) - 1;
step := cast(ceil(1.6 * mask * size / alphabetLength) AS int);
while true
loop
bytes := gen_random_bytes(step);
while counter < step
loop
alphabetIndex := (get_byte(bytes, counter) & mask) + 1;
if alphabetIndex <= alphabetLength then
idBuilder := idBuilder || alphabetArray[alphabetIndex];
if length(idBuilder) = size then
return idBuilder;
end if;
end if;
counter := counter + 1;
end loop;
counter := 0;
end loop;
END
$$;
-- Make a short_nanoid function with a length of 15 and an alphanumeric-only alphabet (no _ or -)
CREATE OR REPLACE FUNCTION short_nanoid()
RETURNS text
LANGUAGE plpgsql
volatile
AS
$$
BEGIN
RETURN nanoid(15, '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ');
END
$$;
-- AlterTable
ALTER TABLE "Experiment" ADD COLUMN "slug" TEXT NOT NULL DEFAULT short_nanoid();
-- For existing experiments, keep the existing id as the slug for backwards compatibility
UPDATE "Experiment" SET "slug" = "id";
-- CreateIndex
CREATE UNIQUE INDEX "Experiment_slug_key" ON "Experiment"("slug");
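As a cross-check, here is a minimal TypeScript sketch of the same rejection-sampling scheme the nanoid() function above implements, with Node's crypto.randomBytes standing in for pgcrypto's gen_random_bytes (the helper name is illustrative):

import { randomBytes } from "crypto";

const ALPHABET = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";

function shortNanoidSketch(size = 15, alphabet = ALPHABET): string {
  // Smallest all-ones bitmask covering every alphabet index (63 for 62 chars).
  const mask = (2 << Math.floor(Math.log2(alphabet.length - 1))) - 1;
  // Oversample random bytes so one batch usually yields a full id.
  const step = Math.ceil((1.6 * mask * size) / alphabet.length);
  let id = "";
  for (;;) {
    const bytes = randomBytes(step);
    for (let i = 0; i < step; i++) {
      // Unlike the SQL version's 1-based arrays, JS strings are 0-indexed.
      const index = bytes[i] & mask;
      // Reject indexes past the alphabet's end so the distribution stays uniform.
      if (index < alphabet.length) {
        id += alphabet[index];
        if (id.length === size) return id;
      }
    }
  }
}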

View File

@@ -11,7 +11,9 @@ datasource db {
}
model Experiment {
id String @id @default(uuid()) @db.Uuid
id String @id @default(uuid()) @db.Uuid
slug String @unique @default(dbgenerated("short_nanoid()"))
label String
sortIndex Int @default(0)
@@ -207,14 +209,14 @@ model Project {
personalProjectUserId String? @unique @db.Uuid
personalProjectUser User? @relation(fields: [personalProjectUserId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
projectUsers ProjectUser[]
projectUserInvitations UserInvitation[]
experiments Experiment[]
datasets Dataset[]
loggedCalls LoggedCall[]
apiKeys ApiKey[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
projectUsers ProjectUser[]
projectUserInvitations UserInvitation[]
experiments Experiment[]
datasets Dataset[]
loggedCalls LoggedCall[]
apiKeys ApiKey[]
}
enum ProjectUserRole {
@@ -324,10 +326,10 @@ model LoggedCallModelResponse {
}
model LoggedCallTag {
id String @id @default(uuid()) @db.Uuid
name String
value String?
projectId String @db.Uuid
id String @id @default(uuid()) @db.Uuid
name String
value String?
projectId String @db.Uuid
loggedCallId String @db.Uuid
loggedCall LoggedCall @relation(fields: [loggedCallId], references: [id], onDelete: Cascade)
@@ -391,12 +393,12 @@ model User {
role UserRole @default(USER)
accounts Account[]
sessions Session[]
projectUsers ProjectUser[]
projects Project[]
worldChampEntrant WorldChampEntrant?
sentUserInvitations UserInvitation[]
accounts Account[]
sessions Session[]
projectUsers ProjectUser[]
projects Project[]
worldChampEntrant WorldChampEntrant?
sentUserInvitations UserInvitation[]
createdAt DateTime @default(now())
updatedAt DateTime @default(now()) @updatedAt
@@ -405,17 +407,17 @@ model User {
model UserInvitation {
id String @id @default(uuid()) @db.Uuid
projectId String @db.Uuid
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
email String
role ProjectUserRole
invitationToken String @unique
senderId String @db.Uuid
sender User @relation(fields: [senderId], references: [id], onDelete: Cascade)
projectId String @db.Uuid
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
email String
role ProjectUserRole
invitationToken String @unique
senderId String @db.Uuid
sender User @relation(fields: [senderId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([projectId, email])
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
model VerificationToken {

View File

@@ -10,6 +10,14 @@ await prisma.project.deleteMany({
where: { id: defaultId },
});
// Mark all users as admins
await prisma.user.updateMany({
where: {},
data: {
role: "ADMIN",
},
});
// If there's an existing project, just seed into it
const project =
(await prisma.project.findFirst({})) ??
@@ -18,12 +26,16 @@ const project =
}));
if (env.OPENPIPE_API_KEY) {
await prisma.apiKey.create({
data: {
await prisma.apiKey.upsert({
where: {
apiKey: env.OPENPIPE_API_KEY,
},
create: {
projectId: project.id,
name: "Default API Key",
apiKey: env.OPENPIPE_API_KEY,
},
update: {},
});
}

View File

@@ -8,7 +8,7 @@ import {
useHandledAsyncCallback,
useVisibleScenarioIds,
} from "~/utils/hooks";
import { cellPadding } from "../constants";
import { cellPadding } from "./constants";
import { ActionButton } from "./ScenariosHeader";
export default function AddVariantButton() {

View File

@@ -16,7 +16,7 @@ import {
VStack,
} from "@chakra-ui/react";
import { BsArrowsAngleExpand, BsX } from "react-icons/bs";
import { cellPadding } from "../constants";
import { cellPadding } from "./constants";
import { FloatingLabelInput } from "./FloatingLabelInput";
import { ScenarioEditorModal } from "./ScenarioEditorModal";
@@ -111,25 +111,23 @@ export default function ScenarioEditor({
onDrop={onReorder}
backgroundColor={isDragTarget ? "gray.100" : "transparent"}
>
{variableLabels.length === 0 ? (
<Box color="gray.500">
{vars.data ? "No scenario variables configured" : "Loading..."}
</Box>
) : (
{
<VStack spacing={4} flex={1} py={2}>
<HStack justifyContent="space-between" w="100%" align="center" spacing={0}>
<Text flex={1}>Scenario</Text>
<Tooltip label="Expand" hasArrow>
<IconButton
aria-label="Expand"
icon={<Icon as={BsArrowsAngleExpand} boxSize={3} />}
onClick={() => setScenarioEditorModalOpen(true)}
size="xs"
colorScheme="gray"
color="gray.500"
variant="ghost"
/>
</Tooltip>
{variableLabels.length && (
<Tooltip label="Expand" hasArrow>
<IconButton
aria-label="Expand"
icon={<Icon as={BsArrowsAngleExpand} boxSize={3} />}
onClick={() => setScenarioEditorModalOpen(true)}
size="xs"
colorScheme="gray"
color="gray.500"
variant="ghost"
/>
</Tooltip>
)}
{canModify && props.canHide && (
<Tooltip label="Delete" hasArrow>
<IconButton
@@ -150,31 +148,38 @@ export default function ScenarioEditor({
</Tooltip>
)}
</HStack>
{variableLabels.map((key) => {
const value = values[key] ?? "";
return (
<FloatingLabelInput
key={key}
label={key}
isDisabled={!canModify}
style={{ width: "100%" }}
maxHeight={32}
value={value}
onChange={(e) => {
setValues((prev) => ({ ...prev, [key]: e.target.value }));
}}
onKeyDown={(e) => {
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
e.preventDefault();
e.currentTarget.blur();
onSave();
}
}}
onMouseEnter={() => setVariableInputHovered(true)}
onMouseLeave={() => setVariableInputHovered(false)}
/>
);
})}
{variableLabels.length === 0 ? (
<Box color="gray.500">
{vars.data ? "No scenario variables configured" : "Loading..."}
</Box>
) : (
variableLabels.map((key) => {
const value = values[key] ?? "";
return (
<FloatingLabelInput
key={key}
label={key}
isDisabled={!canModify}
style={{ width: "100%" }}
maxHeight={32}
value={value}
onChange={(e) => {
setValues((prev) => ({ ...prev, [key]: e.target.value }));
}}
onKeyDown={(e) => {
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
e.preventDefault();
e.currentTarget.blur();
onSave();
}
}}
onMouseEnter={() => setVariableInputHovered(true)}
onMouseLeave={() => setVariableInputHovered(false)}
/>
);
})
)}
{hasChanged && (
<HStack justify="right">
<Button
@@ -192,7 +197,7 @@ export default function ScenarioEditor({
</HStack>
)}
</VStack>
)}
}
</HStack>
{scenarioEditorModalOpen && (
<ScenarioEditorModal

View File

@@ -65,11 +65,11 @@ export const ScenarioEditorModal = ({
<Modal
isOpen
onClose={onClose}
size={{ base: "xl", sm: "2xl", md: "3xl", lg: "5xl", xl: "7xl" }}
size={{ base: "xl", sm: "2xl", md: "3xl", lg: "4xl", xl: "5xl" }}
>
<ModalOverlay />
<ModalContent w={1200}>
<ModalHeader />
<ModalHeader>Edit Scenario</ModalHeader>
<ModalCloseButton />
<ModalBody maxW="unset">
<VStack spacing={8}>

View File

@@ -11,7 +11,7 @@ import {
IconButton,
Spinner,
} from "@chakra-ui/react";
import { cellPadding } from "../constants";
import { cellPadding } from "./constants";
import {
useExperiment,
useExperimentAccess,

View File

@@ -110,7 +110,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
setIsChanged(false);
await utils.promptVariants.list.invalidate();
}, [checkForChanges]);
}, [checkForChanges, replaceVariant.mutateAsync]);
useEffect(() => {
if (monaco) {

View File

@@ -1,11 +1,11 @@
import { useState, type DragEvent } from "react";
import { type PromptVariant } from "../OutputsTable/types";
import { type PromptVariant } from "../types";
import { api } from "~/utils/api";
import { RiDraggable } from "react-icons/ri";
import { useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
import { HStack, Icon, Text, GridItem, type GridItemProps } from "@chakra-ui/react"; // Changed here
import { cellPadding, headerMinHeight } from "../constants";
import AutoResizeTextArea from "../AutoResizeTextArea";
import AutoResizeTextArea from "../../AutoResizeTextArea";
import VariantHeaderMenuButton from "./VariantHeaderMenuButton";
export default function VariantHeader(
@@ -75,7 +75,7 @@ export default function VariantHeader(
padding={0}
sx={{
position: "sticky",
top: "-2",
top: "0",
// Ensure that the menu always appears above the sticky header of other variants
zIndex: menuOpen ? "dropdown" : 10,
}}

View File

@@ -1,6 +1,4 @@
import { type PromptVariant } from "../OutputsTable/types";
import { api } from "~/utils/api";
import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
import { useState } from "react";
import {
Icon,
Menu,
@@ -14,10 +12,13 @@ import {
} from "@chakra-ui/react";
import { BsFillTrashFill, BsGear, BsStars } from "react-icons/bs";
import { FaRegClone } from "react-icons/fa";
import { useState } from "react";
import { RefinePromptModal } from "../RefinePromptModal/RefinePromptModal";
import { RiExchangeFundsFill } from "react-icons/ri";
import { ChangeModelModal } from "../ChangeModelModal/ChangeModelModal";
import { api } from "~/utils/api";
import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
import { type PromptVariant } from "../types";
import { RefinePromptModal } from "../../RefinePromptModal/RefinePromptModal";
import { ChangeModelModal } from "../../ChangeModelModal/ChangeModelModal";
export default function VariantHeaderMenuButton({
variant,

View File

@@ -1,6 +1,6 @@
import { HStack, Icon, Text, useToken } from "@chakra-ui/react";
import { type PromptVariant } from "./types";
import { cellPadding } from "../constants";
import { cellPadding } from "./constants";
import { api } from "~/utils/api";
import chroma from "chroma-js";
import { BsCurrencyDollar } from "react-icons/bs";

View File

@@ -3,13 +3,14 @@ import { api } from "~/utils/api";
import AddVariantButton from "./AddVariantButton";
import ScenarioRow from "./ScenarioRow";
import VariantEditor from "./VariantEditor";
import VariantHeader from "../VariantHeader/VariantHeader";
import VariantHeader from "./VariantHeader/VariantHeader";
import VariantStats from "./VariantStats";
import { ScenariosHeader } from "./ScenariosHeader";
import { borders } from "./styles";
import { useScenarios } from "~/utils/hooks";
import ScenarioPaginator from "./ScenarioPaginator";
import { Fragment } from "react";
import useScrolledPast from "./useHasScrolledPast";
export default function OutputsTable({ experimentId }: { experimentId: string | undefined }) {
const variants = api.promptVariants.list.useQuery(
@@ -18,6 +19,7 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
);
const scenarios = useScenarios();
const shouldFlattenHeader = useScrolledPast(50);
if (!variants.data || !scenarios.data) return null;
@@ -63,8 +65,8 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
variant={variant}
canHide={variants.data.length > 1}
rowStart={1}
borderTopLeftRadius={isFirst ? 8 : 0}
borderTopRightRadius={isLast ? 8 : 0}
borderTopLeftRadius={isFirst && !shouldFlattenHeader ? 8 : 0}
borderTopRightRadius={isLast && !shouldFlattenHeader ? 8 : 0}
{...sharedProps}
/>
<GridItem rowStart={2} {...sharedProps}>

View File

@@ -0,0 +1,34 @@
import { useState, useEffect } from "react";
const useScrolledPast = (scrollThreshold: number) => {
const [hasScrolledPast, setHasScrolledPast] = useState(true);
useEffect(() => {
const container = document.getElementById("output-container");
if (!container) {
console.warn('Element with id "output-container" not found.');
return;
}
const checkScroll = () => {
const { scrollTop } = container;
// Check if scrollTop is greater than scrollThreshold
setHasScrolledPast(scrollTop > scrollThreshold);
};
checkScroll();
container.addEventListener("scroll", checkScroll);
// Cleanup
return () => {
container.removeEventListener("scroll", checkScroll);
};
}, []);
return hasScrolledPast;
};
export default useScrolledPast;

View File

@@ -14,21 +14,11 @@ import { formatTimePast } from "~/utils/dayjs";
import Link from "next/link";
import { useRouter } from "next/router";
import { BsPlusSquare } from "react-icons/bs";
import { api } from "~/utils/api";
import { RouterOutputs, api } from "~/utils/api";
import { useHandledAsyncCallback } from "~/utils/hooks";
import { useAppStore } from "~/state/store";
type ExperimentData = {
testScenarioCount: number;
promptVariantCount: number;
id: string;
label: string;
sortIndex: number;
createdAt: Date;
updatedAt: Date;
};
export const ExperimentCard = ({ exp }: { exp: ExperimentData }) => {
export const ExperimentCard = ({ exp }: { exp: RouterOutputs["experiments"]["list"][0] }) => {
return (
<Card
w="full"
@@ -45,7 +35,7 @@ export const ExperimentCard = ({ exp }: { exp: ExperimentData }) => {
as={Link}
w="full"
h="full"
href={{ pathname: "/experiments/[id]", query: { id: exp.id } }}
href={{ pathname: "/experiments/[experimentSlug]", query: { experimentSlug: exp.slug } }}
justify="space-between"
>
<HStack w="full" color="gray.700" justify="center">
@@ -89,8 +79,8 @@ export const NewExperimentCard = () => {
projectId: selectedProjectId ?? "",
});
await router.push({
pathname: "/experiments/[id]",
query: { id: newExperiment.id },
pathname: "/experiments/[experimentSlug]",
query: { experimentSlug: newExperiment.slug },
});
}, [createMutation, router, selectedProjectId]);

View File

@@ -16,11 +16,14 @@ export const useOnForkButtonPressed = () => {
const [onFork, isForking] = useHandledAsyncCallback(async () => {
if (!experiment.data?.id || !selectedProjectId) return;
const forkedExperimentId = await forkMutation.mutateAsync({
const newExperiment = await forkMutation.mutateAsync({
id: experiment.data.id,
projectId: selectedProjectId,
});
await router.push({ pathname: "/experiments/[id]", query: { id: forkedExperimentId } });
await router.push({
pathname: "/experiments/[experimentSlug]",
query: { experimentSlug: newExperiment.slug },
});
}, [forkMutation, experiment.data?.id, router]);
const onForkButtonPressed = useCallback(() => {

View File

@@ -67,7 +67,13 @@ export default function ProjectMenu() {
);
return (
<VStack w="full" alignItems="flex-start" spacing={0} py={1}>
<VStack
w="full"
alignItems="flex-start"
spacing={0}
py={1}
zIndex={popover.isOpen ? "dropdown" : undefined}
>
<Popover
placement="bottom"
isOpen={popover.isOpen}

View File

@@ -12,7 +12,6 @@ export const refinementActions: Record<string, RefinementAction> = {
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
@@ -29,7 +28,6 @@ export const refinementActions: Record<string, RefinementAction> = {
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
@@ -126,7 +124,6 @@ export const refinementActions: Record<string, RefinementAction> = {
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
@@ -143,7 +140,6 @@ export const refinementActions: Record<string, RefinementAction> = {
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
@@ -237,7 +233,6 @@ export const refinementActions: Record<string, RefinementAction> = {
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
stream: true,
messages: [
{
role: "system",

View File

@@ -3,10 +3,12 @@ import { type FrontendModelProvider } from "../types";
import { refinementActions } from "./refinementActions";
import {
templateOpenOrcaPrompt,
// templateAlpacaInstructPrompt,
templateAlpacaInstructPrompt,
// templateSystemUserAssistantPrompt,
templateInstructionInputResponsePrompt,
templateAiroborosPrompt,
templateGryphePrompt,
templateVicunaPrompt,
} from "./templatePrompt";
const frontendModelProvider: FrontendModelProvider<SupportedModel, OpenpipeChatOutput> = {
@@ -22,15 +24,16 @@ const frontendModelProvider: FrontendModelProvider<SupportedModel, OpenpipeChatO
learnMoreUrl: "https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B",
templatePrompt: templateOpenOrcaPrompt,
},
// "Open-Orca/OpenOrca-Platypus2-13B": {
// name: "OpenOrca-Platypus2-13B",
// contextWindow: 4096,
// pricePerSecond: 0.0003,
// speed: "medium",
// provider: "openpipe/Chat",
// learnMoreUrl: "https://huggingface.co/Open-Orca/OpenOrca-Platypus2-13B",
// templatePrompt: templateAlpacaInstructPrompt,
// },
"Open-Orca/OpenOrca-Platypus2-13B": {
name: "OpenOrca-Platypus2-13B",
contextWindow: 4096,
pricePerSecond: 0.0003,
speed: "medium",
provider: "openpipe/Chat",
learnMoreUrl: "https://huggingface.co/Open-Orca/OpenOrca-Platypus2-13B",
templatePrompt: templateAlpacaInstructPrompt,
defaultStopTokens: ["</s>"],
},
// "stabilityai/StableBeluga-13B": {
// name: "StableBeluga-13B",
// contextWindow: 4096,
@@ -58,6 +61,33 @@ const frontendModelProvider: FrontendModelProvider<SupportedModel, OpenpipeChatO
learnMoreUrl: "https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-2.0",
templatePrompt: templateAiroborosPrompt,
},
"lmsys/vicuna-13b-v1.5": {
name: "vicuna-13b-v1.5",
contextWindow: 4096,
pricePerSecond: 0.0003,
speed: "medium",
provider: "openpipe/Chat",
learnMoreUrl: "https://huggingface.co/lmsys/vicuna-13b-v1.5",
templatePrompt: templateVicunaPrompt,
},
"Gryphe/MythoMax-L2-13b": {
name: "MythoMax-L2-13b",
contextWindow: 4096,
pricePerSecond: 0.0003,
speed: "medium",
provider: "openpipe/Chat",
learnMoreUrl: "https://huggingface.co/Gryphe/MythoMax-L2-13b",
templatePrompt: templateGryphePrompt,
},
"NousResearch/Nous-Hermes-llama-2-7b": {
name: "Nous-Hermes-llama-2-7b",
contextWindow: 4096,
pricePerSecond: 0.0003,
speed: "medium",
provider: "openpipe/Chat",
learnMoreUrl: "https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b",
templatePrompt: templateInstructionInputResponsePrompt,
},
},
refinementActions,

View File

@@ -8,10 +8,13 @@ import frontendModelProvider from "./frontend";
const modelEndpoints: Record<OpenpipeChatInput["model"], string> = {
"Open-Orca/OpenOrcaxOpenChat-Preview2-13B": "https://5ef82gjxk8kdys-8000.proxy.runpod.net/v1",
// "Open-Orca/OpenOrca-Platypus2-13B": "https://lt5qlel6qcji8t-8000.proxy.runpod.net/v1",
"Open-Orca/OpenOrca-Platypus2-13B": "https://lt5qlel6qcji8t-8000.proxy.runpod.net/v1",
// "stabilityai/StableBeluga-13B": "https://vcorl8mxni2ou1-8000.proxy.runpod.net/v1",
"NousResearch/Nous-Hermes-Llama2-13b": "https://ncv8pw3u0vb8j2-8000.proxy.runpod.net/v1",
"jondurbin/airoboros-l2-13b-gpt4-2.0": "https://9nrbx7oph4btou-8000.proxy.runpod.net/v1",
"lmsys/vicuna-13b-v1.5": "https://h88hkt3ux73rb7-8000.proxy.runpod.net/v1",
"Gryphe/MythoMax-L2-13b": "https://3l5jvhnxdgky3v-8000.proxy.runpod.net/v1",
"NousResearch/Nous-Hermes-llama-2-7b": "https://ua1bpc6kv3dgge-8000.proxy.runpod.net/v1",
};
export async function getCompletion(
@@ -36,10 +39,20 @@ export async function getCompletion(
const start = Date.now();
let finalCompletion: OpenpipeChatOutput = "";
const completionParams = {
model,
prompt: templatedPrompt,
...rest,
};
if (!completionParams.stop && frontendModelProvider.models[model].defaultStopTokens) {
completionParams.stop = frontendModelProvider.models[model].defaultStopTokens;
}
try {
if (onStream) {
const resp = await openai.completions.create(
{ model, prompt: templatedPrompt, ...rest, stream: true },
{ ...completionParams, stream: true },
{
maxRetries: 0,
},
@@ -58,7 +71,7 @@ export async function getCompletion(
}
} else {
const resp = await openai.completions.create(
{ model, prompt: templatedPrompt, ...rest, stream: false },
{ ...completionParams, stream: false },
{
maxRetries: 0,
},
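Isolated as a small sketch, the stop-token defaulting above amounts to the following (the type and helper name are illustrative, not part of the diff):

type CompletionParams = { model: string; prompt: string; stop?: string[] };

// Fill `stop` only when the caller didn't set one and the model defines
// defaults, e.g. ["</s>"] for Open-Orca/OpenOrca-Platypus2-13B above.
function withDefaultStop(params: CompletionParams, defaultStopTokens?: string[]): CompletionParams {
  return params.stop || !defaultStopTokens ? params : { ...params, stop: defaultStopTokens };
}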

View File

@@ -6,10 +6,13 @@ import frontendModelProvider from "./frontend";
const supportedModels = [
"Open-Orca/OpenOrcaxOpenChat-Preview2-13B",
// "Open-Orca/OpenOrca-Platypus2-13B",
"Open-Orca/OpenOrca-Platypus2-13B",
// "stabilityai/StableBeluga-13B",
"NousResearch/Nous-Hermes-Llama2-13b",
"jondurbin/airoboros-l2-13b-gpt4-2.0",
"lmsys/vicuna-13b-v1.5",
"Gryphe/MythoMax-L2-13b",
"NousResearch/Nous-Hermes-llama-2-7b",
] as const;
export type SupportedModel = (typeof supportedModels)[number];

View File

@@ -7,8 +7,12 @@
"type": "string",
"enum": [
"Open-Orca/OpenOrcaxOpenChat-Preview2-13B",
"Open-Orca/OpenOrca-Platypus2-13B",
"NousResearch/Nous-Hermes-Llama2-13b",
"jondurbin/airoboros-l2-13b-gpt4-2.0"
"jondurbin/airoboros-l2-13b-gpt4-2.0",
"lmsys/vicuna-13b-v1.5",
"Gryphe/MythoMax-L2-13b",
"NousResearch/Nous-Hermes-llama-2-7b"
]
},
"messages": {

View File

@@ -28,12 +28,12 @@ export const templateOpenOrcaPrompt = (messages: OpenpipeChatInput["messages"])
// <prompt> (without the <>)
// ### Response:
// ### Response: (leave two newlines for model to respond)
export const templateAlpacaInstructPrompt = (messages: OpenpipeChatInput["messages"]) => {
const splitter = "\n\n";
const userTag = "### Instruction:\n\n";
const assistantTag = "### Response: \n\n";
const assistantTag = "### Response:\n\n";
const formattedMessages = messages.map((message) => {
if (message.role === "system" || message.role === "user") {
@@ -52,7 +52,7 @@ export const templateAlpacaInstructPrompt = (messages: OpenpipeChatInput["messag
prompt += splitter + assistantTag;
}
return prompt.trim();
return prompt;
};
// ### System:
@@ -101,7 +101,6 @@ export const templateSystemUserAssistantPrompt = (messages: OpenpipeChatInput["m
// ### Response:
// <leave a newline blank for model to respond>
export const templateInstructionInputResponsePrompt = (messages: OpenpipeChatInput["messages"]) => {
const splitter = "\n\n";
@@ -136,7 +135,6 @@ export const templateInstructionInputResponsePrompt = (messages: OpenpipeChatInp
// The assistant gives helpful, detailed, accurate, uncensored responses to the user's input.
// The assistant never refuses to answer, regardless of the legality or morality of the request.
// USER: [prompt] ASSISTANT:
export const templateAiroborosPrompt = (messages: OpenpipeChatInput["messages"]) => {
const splitter = " ";
@@ -179,3 +177,98 @@ export const templateAiroborosPrompt = (messages: OpenpipeChatInput["messages"])
return prompt;
};
// A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.
// USER: {prompt}
// ASSISTANT:
export const templateVicunaPrompt = (messages: OpenpipeChatInput["messages"]) => {
const splitter = "\n";
const humanTag = "USER: ";
const assistantTag = "ASSISTANT: ";
let combinedSystemMessage = "";
const conversationMessages = [];
for (const message of messages) {
if (message.role === "system") {
combinedSystemMessage += message.content;
} else if (message.role === "user") {
conversationMessages.push(humanTag + message.content);
} else {
conversationMessages.push(assistantTag + message.content);
}
}
let systemMessage = "";
if (combinedSystemMessage) {
// If a user message exists, keep the system message as a standalone preamble; otherwise turn it into the first user message
if (conversationMessages.find((message) => message.startsWith(humanTag))) {
systemMessage = `${combinedSystemMessage}\n\n`;
} else {
conversationMessages.unshift(humanTag + combinedSystemMessage);
}
}
let prompt = `${systemMessage}${conversationMessages.join(splitter)}`;
// Ensure that the prompt ends with an assistant message
const lastHumanIndex = prompt.lastIndexOf(humanTag);
const lastAssistantIndex = prompt.lastIndexOf(assistantTag);
if (lastHumanIndex > lastAssistantIndex) {
prompt += splitter + assistantTag;
}
return prompt.trim();
};
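A worked example of the template above; the expected string follows directly from the tags and splitter, and the final trim() drops the trailing space after the assistant tag:

const vicunaPrompt = templateVicunaPrompt([
  { role: "system", content: "You are helpful." },
  { role: "user", content: "Hi!" },
]);
// => "You are helpful.\n\nUSER: Hi!\nASSISTANT:"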
// <System prompt/Character Card>
// ### Instruction:
// Your instruction or question here.
// For roleplay purposes, I suggest the following - Write <CHAR NAME>'s next reply in a chat between <YOUR NAME> and <CHAR NAME>. Write a single reply only.
// ### Response:
export const templateGryphePrompt = (messages: OpenpipeChatInput["messages"]) => {
const splitter = "\n\n";
const instructionTag = "### Instruction:\n";
const responseTag = "### Response:\n";
let combinedSystemMessage = "";
const conversationMessages = [];
for (const message of messages) {
if (message.role === "system") {
combinedSystemMessage += message.content;
} else if (message.role === "user") {
conversationMessages.push(instructionTag + message.content);
} else {
conversationMessages.push(responseTag + message.content);
}
}
let systemMessage = "";
if (combinedSystemMessage) {
// If a user message exists, keep the system message as a standalone preamble; otherwise turn it into the first instruction
if (conversationMessages.find((message) => message.startsWith(instructionTag))) {
systemMessage = `${combinedSystemMessage}\n\n`;
} else {
conversationMessages.unshift(instructionTag + combinedSystemMessage);
}
}
let prompt = `${systemMessage}${conversationMessages.join(splitter)}`;
// Ensure that the prompt ends with an assistant message
const lastInstructionIndex = prompt.lastIndexOf(instructionTag);
const lastAssistantIndex = prompt.lastIndexOf(responseTag);
if (lastInstructionIndex > lastAssistantIndex) {
prompt += splitter + responseTag;
}
return prompt;
};

View File

@@ -25,6 +25,7 @@ export type Model = {
learnMoreUrl?: string;
apiDocsUrl?: string;
templatePrompt?: (initialPrompt: OpenpipeChatInput["messages"]) => string;
defaultStopTokens?: string[];
};
export type ProviderModel = { provider: z.infer<typeof ZodSupportedProvider>; model: string };

View File

@@ -0,0 +1,54 @@
import { Card, Table, Tbody, Td, Th, Thead, Tr } from "@chakra-ui/react";
import dayjs from "dayjs";
import { isDate, isObject, isString } from "lodash-es";
import AppShell from "~/components/nav/AppShell";
import { type RouterOutputs, api } from "~/utils/api";
const fieldsToShow: (keyof RouterOutputs["adminJobs"]["list"][0])[] = [
"id",
"queue_name",
"payload",
"priority",
"attempts",
"last_error",
"created_at",
"key",
"locked_at",
"run_at",
];
export default function Jobs() {
const jobs = api.adminJobs.list.useQuery({});
return (
<AppShell title="Admin Jobs">
<Card m={4} overflowX="auto">
<Table>
<Thead>
<Tr>
{fieldsToShow.map((field) => (
<Th key={field}>{field}</Th>
))}
</Tr>
</Thead>
<Tbody>
{jobs.data?.map((job) => (
<Tr key={job.id}>
{fieldsToShow.map((field) => {
let value = job[field];
// Format dates for display and stringify object payloads
if (isDate(value)) {
value = dayjs(value).format("YYYY-MM-DD HH:mm:ss");
} else if (isObject(value) && !isString(value)) {
value = JSON.stringify(value);
}
return <Td key={field}>{value}</Td>;
})}
</Tr>
))}
</Tbody>
</Table>
</Card>
</AppShell>
);
}

View File

@@ -33,9 +33,9 @@ export default function Experiment() {
const experiment = useExperiment();
const experimentStats = api.experiments.stats.useQuery(
{ id: router.query.id as string },
{ id: experiment.data?.id as string },
{
enabled: !!router.query.id,
enabled: !!experiment.data?.id,
},
);
const stats = experimentStats.data;
@@ -124,8 +124,8 @@ export default function Experiment() {
<ExperimentHeaderButtons />
</PageHeaderContainer>
<ExperimentSettingsDrawer />
<Box w="100%" overflowX="auto" flex={1}>
<OutputsTable experimentId={router.query.id as string | undefined} />
<Box w="100%" overflowX="auto" flex={1} id="output-container">
<OutputsTable experimentId={experiment.data?.id} />
</Box>
</VStack>
</AppShell>

View File

@@ -66,7 +66,7 @@ export const v1ApiRouter = createOpenApiRouter({
if (!existingResponse) return { respPayload: null };
await prisma.loggedCall.create({
const newCall = await prisma.loggedCall.create({
data: {
projectId: ctx.key.projectId,
requestedAt: new Date(input.requestedAt),
@@ -75,11 +75,7 @@ export const v1ApiRouter = createOpenApiRouter({
},
});
await createTags(
existingResponse.originalLoggedCall.projectId,
existingResponse.originalLoggedCallId,
input.tags,
);
await createTags(newCall.projectId, newCall.id, input.tags);
return {
respPayload: existingResponse.respPayload,
};
@@ -111,7 +107,7 @@ export const v1ApiRouter = createOpenApiRouter({
.default({}),
}),
)
.output(z.object({ status: z.literal("ok") }))
.output(z.object({ status: z.union([z.literal("ok"), z.literal("error")]) }))
.mutation(async ({ input, ctx }) => {
const reqPayload = await reqValidator.spa(input.reqPayload);
const respPayload = await respValidator.spa(input.respPayload);
@@ -212,6 +208,7 @@ export const v1ApiRouter = createOpenApiRouter({
createdAt: true,
cacheHit: true,
tags: true,
id: true,
modelResponse: {
select: {
id: true,
@@ -237,7 +234,7 @@ async function createTags(projectId: string, loggedCallId: string, tags: Record<
const tagsToCreate = Object.entries(tags).map(([name, value]) => ({
projectId,
loggedCallId,
name: name.replaceAll(/[^a-zA-Z0-9_$]/g, "_"),
name: name.replaceAll(/[^a-zA-Z0-9_$.]/g, "_"),
value,
}));
await prisma.loggedCallTag.createMany({

View File

@@ -12,6 +12,7 @@ import { projectsRouter } from "./routers/projects.router";
import { dashboardRouter } from "./routers/dashboard.router";
import { loggedCallsRouter } from "./routers/loggedCalls.router";
import { usersRouter } from "./routers/users.router";
import { adminJobsRouter } from "./routers/adminJobs.router";
/**
* This is the primary router for your server.
@@ -32,6 +33,7 @@ export const appRouter = createTRPCRouter({
dashboard: dashboardRouter,
loggedCalls: loggedCallsRouter,
users: usersRouter,
adminJobs: adminJobsRouter,
});
// export type definition of API

View File

@@ -0,0 +1,18 @@
import { z } from "zod";
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { kysely } from "~/server/db";
import { requireIsAdmin } from "~/utils/accessControl";
export const adminJobsRouter = createTRPCRouter({
list: protectedProcedure.input(z.object({})).query(async ({ ctx }) => {
await requireIsAdmin(ctx);
return await kysely
.selectFrom("graphile_worker.jobs")
.limit(100)
.selectAll()
.orderBy("created_at", "desc")
.execute();
}),
});

View File

@@ -85,15 +85,16 @@ export const experimentsRouter = createTRPCRouter({
return experimentsWithCounts;
}),
get: publicProcedure.input(z.object({ id: z.string() })).query(async ({ input, ctx }) => {
await requireCanViewExperiment(input.id, ctx);
get: publicProcedure.input(z.object({ slug: z.string() })).query(async ({ input, ctx }) => {
const experiment = await prisma.experiment.findFirstOrThrow({
where: { id: input.id },
where: { slug: input.slug },
include: {
project: true,
},
});
await requireCanViewExperiment(experiment.id, ctx);
const canModify = ctx.session?.user.id
? await canModifyExperiment(experiment.id, ctx.session?.user.id)
: false;
@@ -177,6 +178,7 @@ export const experimentsRouter = createTRPCRouter({
existingToNewVariantIds.set(variant.id, newVariantId);
variantsToCreate.push({
...variant,
uiId: uuidv4(),
id: newVariantId,
experimentId: newExperimentId,
});
@@ -190,6 +192,7 @@ export const experimentsRouter = createTRPCRouter({
scenariosToCreate.push({
...scenario,
id: newScenarioId,
uiId: uuidv4(),
experimentId: newExperimentId,
variableValues: scenario.variableValues as Prisma.InputJsonValue,
});
@@ -290,7 +293,10 @@ export const experimentsRouter = createTRPCRouter({
}),
]);
return newExperimentId;
const newExperiment = await prisma.experiment.findUniqueOrThrow({
where: { id: newExperimentId },
});
return newExperiment;
}),
create: protectedProcedure
@@ -335,7 +341,6 @@ export const experimentsRouter = createTRPCRouter({
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo-0613",
stream: true,
messages: [
{
role: "system",

View File

@@ -1,27 +1,6 @@
import {
type Experiment,
type PromptVariant,
type TestScenario,
type TemplateVariable,
type ScenarioVariantCell,
type ModelResponse,
type Evaluation,
type OutputEvaluation,
type Dataset,
type DatasetEntry,
type Project,
type ProjectUser,
type WorldChampEntrant,
type LoggedCall,
type LoggedCallModelResponse,
type LoggedCallTag,
type ApiKey,
type Account,
type Session,
type User,
type VerificationToken,
PrismaClient,
} from "@prisma/client";
import { type DB } from "./db.types";
import { PrismaClient } from "@prisma/client";
import { Kysely, PostgresDialect } from "kysely";
// TODO: Revert to normal import when our tsconfig.json is fixed
// import { Pool } from "pg";
@@ -32,30 +11,6 @@ const Pool = (UntypedPool.default ? UntypedPool.default : UntypedPool) as typeof
import { env } from "~/env.mjs";
interface DB {
Experiment: Experiment;
PromptVariant: PromptVariant;
TestScenario: TestScenario;
TemplateVariable: TemplateVariable;
ScenarioVariantCell: ScenarioVariantCell;
ModelResponse: ModelResponse;
Evaluation: Evaluation;
OutputEvaluation: OutputEvaluation;
Dataset: Dataset;
DatasetEntry: DatasetEntry;
Project: Project;
ProjectUser: ProjectUser;
WorldChampEntrant: WorldChampEntrant;
LoggedCall: LoggedCall;
LoggedCallModelResponse: LoggedCallModelResponse;
LoggedCallTag: LoggedCallTag;
ApiKey: ApiKey;
Account: Account;
Session: Session;
User: User;
VerificationToken: VerificationToken;
}
const globalForPrisma = globalThis as unknown as {
prisma: PrismaClient | undefined;
};

app/src/server/db.types.ts (new file, 336 lines)
View File

@@ -0,0 +1,336 @@
import type { ColumnType } from "kysely";
export type Generated<T> = T extends ColumnType<infer S, infer I, infer U>
? ColumnType<S, I | undefined, U>
: ColumnType<T, T | undefined, T>;
export type Int8 = ColumnType<string, string | number | bigint, string | number | bigint>;
export type Json = ColumnType<JsonValue, string, string>;
export type JsonArray = JsonValue[];
export type JsonObject = {
[K in string]?: JsonValue;
};
export type JsonPrimitive = boolean | null | number | string;
export type JsonValue = JsonArray | JsonObject | JsonPrimitive;
export type Numeric = ColumnType<string, string | number, string | number>;
export type Timestamp = ColumnType<Date, Date | string, Date | string>;
export interface _PrismaMigrations {
id: string;
checksum: string;
finished_at: Timestamp | null;
migration_name: string;
logs: string | null;
rolled_back_at: Timestamp | null;
started_at: Generated<Timestamp>;
applied_steps_count: Generated<number>;
}
export interface Account {
id: string;
userId: string;
type: string;
provider: string;
providerAccountId: string;
refresh_token: string | null;
refresh_token_expires_in: number | null;
access_token: string | null;
expires_at: number | null;
token_type: string | null;
scope: string | null;
id_token: string | null;
session_state: string | null;
}
export interface ApiKey {
id: string;
name: string;
apiKey: string;
projectId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
}
export interface Dataset {
id: string;
name: string;
projectId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
}
export interface DatasetEntry {
id: string;
input: string;
output: string | null;
datasetId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
}
export interface Evaluation {
id: string;
label: string;
value: string;
evalType: "CONTAINS" | "DOES_NOT_CONTAIN" | "GPT4_EVAL";
experimentId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
}
export interface Experiment {
id: string;
label: string;
sortIndex: Generated<number>;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
projectId: string;
}
export interface GraphileWorkerJobQueues {
queue_name: string;
job_count: number;
locked_at: Timestamp | null;
locked_by: string | null;
}
export interface GraphileWorkerJobs {
id: Generated<Int8>;
queue_name: string | null;
task_identifier: string;
payload: Generated<Json>;
priority: Generated<number>;
run_at: Generated<Timestamp>;
attempts: Generated<number>;
max_attempts: Generated<number>;
last_error: string | null;
created_at: Generated<Timestamp>;
updated_at: Generated<Timestamp>;
key: string | null;
locked_at: Timestamp | null;
locked_by: string | null;
revision: Generated<number>;
flags: Json | null;
}
export interface GraphileWorkerKnownCrontabs {
identifier: string;
known_since: Timestamp;
last_execution: Timestamp | null;
}
export interface GraphileWorkerMigrations {
id: number;
ts: Generated<Timestamp>;
}
export interface LoggedCall {
id: string;
requestedAt: Timestamp;
cacheHit: boolean;
modelResponseId: string | null;
projectId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
model: string | null;
}
export interface LoggedCallModelResponse {
id: string;
reqPayload: Json;
statusCode: number | null;
respPayload: Json | null;
errorMessage: string | null;
requestedAt: Timestamp;
receivedAt: Timestamp;
cacheKey: string | null;
durationMs: number | null;
inputTokens: number | null;
outputTokens: number | null;
finishReason: string | null;
completionId: string | null;
cost: Numeric | null;
originalLoggedCallId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
}
export interface LoggedCallTag {
id: string;
name: string;
value: string | null;
loggedCallId: string;
projectId: string;
}
export interface ModelResponse {
id: string;
cacheKey: string;
respPayload: Json | null;
inputTokens: number | null;
outputTokens: number | null;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
scenarioVariantCellId: string;
cost: number | null;
requestedAt: Timestamp | null;
receivedAt: Timestamp | null;
statusCode: number | null;
errorMessage: string | null;
retryTime: Timestamp | null;
outdated: Generated<boolean>;
}
export interface OutputEvaluation {
id: string;
result: number;
details: string | null;
modelResponseId: string;
evaluationId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
}
export interface Project {
id: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
personalProjectUserId: string | null;
name: Generated<string>;
}
export interface ProjectUser {
id: string;
role: "ADMIN" | "MEMBER" | "VIEWER";
projectId: string;
userId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
}
export interface PromptVariant {
id: string;
label: string;
uiId: string;
visible: Generated<boolean>;
sortIndex: Generated<number>;
experimentId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
promptConstructor: string;
model: string;
promptConstructorVersion: number;
modelProvider: string;
}
export interface ScenarioVariantCell {
id: string;
errorMessage: string | null;
promptVariantId: string;
testScenarioId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
retrievalStatus: Generated<"COMPLETE" | "ERROR" | "IN_PROGRESS" | "PENDING">;
prompt: Json | null;
jobQueuedAt: Timestamp | null;
jobStartedAt: Timestamp | null;
}
export interface Session {
id: string;
sessionToken: string;
userId: string;
expires: Timestamp;
}
export interface TemplateVariable {
id: string;
label: string;
experimentId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
}
export interface TestScenario {
id: string;
variableValues: Json;
uiId: string;
visible: Generated<boolean>;
sortIndex: Generated<number>;
experimentId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
}
export interface User {
id: string;
name: string | null;
email: string | null;
emailVerified: Timestamp | null;
image: string | null;
createdAt: Generated<Timestamp>;
updatedAt: Generated<Timestamp>;
role: Generated<"ADMIN" | "USER">;
}
export interface UserInvitation {
id: string;
projectId: string;
email: string;
role: "ADMIN" | "MEMBER" | "VIEWER";
invitationToken: string;
senderId: string;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
}
export interface VerificationToken {
identifier: string;
token: string;
expires: Timestamp;
}
export interface WorldChampEntrant {
id: string;
userId: string;
approved: Generated<boolean>;
createdAt: Generated<Timestamp>;
updatedAt: Timestamp;
}
export interface DB {
_prisma_migrations: _PrismaMigrations;
Account: Account;
ApiKey: ApiKey;
Dataset: Dataset;
DatasetEntry: DatasetEntry;
Evaluation: Evaluation;
Experiment: Experiment;
"graphile_worker.job_queues": GraphileWorkerJobQueues;
"graphile_worker.jobs": GraphileWorkerJobs;
"graphile_worker.known_crontabs": GraphileWorkerKnownCrontabs;
"graphile_worker.migrations": GraphileWorkerMigrations;
LoggedCall: LoggedCall;
LoggedCallModelResponse: LoggedCallModelResponse;
LoggedCallTag: LoggedCallTag;
ModelResponse: ModelResponse;
OutputEvaluation: OutputEvaluation;
Project: Project;
ProjectUser: ProjectUser;
PromptVariant: PromptVariant;
ScenarioVariantCell: ScenarioVariantCell;
Session: Session;
TemplateVariable: TemplateVariable;
TestScenario: TestScenario;
User: User;
UserInvitation: UserInvitation;
VerificationToken: VerificationToken;
WorldChampEntrant: WorldChampEntrant;
}
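A minimal sketch of how these generated types flow through a query, assuming `kysely` is the Kysely<DB> instance exported from app/src/server/db.ts (the project id is illustrative):

import { kysely } from "~/server/db";

const recentExperiments = await kysely
  .selectFrom("Experiment")
  .select(["id", "label", "sortIndex"]) // sortIndex is Generated<number>, read back as plain number
  .where("projectId", "=", "some-project-uuid")
  .orderBy("createdAt", "desc")
  .limit(10)
  .execute();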

View File

@@ -1,19 +0,0 @@
import "dotenv/config";
import { openai } from "../utils/openai";
const resp = await openai.chat.completions.create({
model: "gpt-3.5-turbo-0613",
stream: true,
messages: [
{
role: "user",
content: "count to 20",
},
],
});
for await (const part of resp) {
console.log("part", part);
}
console.log("final resp", resp);

View File

@@ -1,15 +1,24 @@
// Import necessary dependencies
import { quickAddJob, type Helpers, type Task } from "graphile-worker";
import { type Helpers, type Task, makeWorkerUtils } from "graphile-worker";
import { env } from "~/env.mjs";
// Define the defineTask function
let workerUtilsPromise: ReturnType<typeof makeWorkerUtils> | null = null;
function workerUtils() {
if (!workerUtilsPromise) {
workerUtilsPromise = makeWorkerUtils({
connectionString: env.DATABASE_URL,
});
}
return workerUtilsPromise;
}
function defineTask<TPayload>(
taskIdentifier: string,
taskHandler: (payload: TPayload, helpers: Helpers) => Promise<void>,
) {
const enqueue = async (payload: TPayload, runAt?: Date) => {
console.log("Enqueuing task", taskIdentifier, payload);
await quickAddJob({ connectionString: env.DATABASE_URL }, taskIdentifier, payload, { runAt });
await (await workerUtils()).addJob(taskIdentifier, payload, { runAt });
};
const handler = (payload: TPayload, helpers: Helpers) => {
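The same connection-reuse pattern in isolation (task name and payload are hypothetical; WorkerUtils.addJob is the graphile-worker call the diff switches to):

import { makeWorkerUtils, type WorkerUtils } from "graphile-worker";

let utilsPromise: Promise<WorkerUtils> | null = null;
const getWorkerUtils = () =>
  (utilsPromise ??= makeWorkerUtils({ connectionString: process.env.DATABASE_URL! }));

// Every enqueue reuses the one shared pool instead of opening a fresh
// Postgres connection per job (the problem the commit message describes).
async function enqueueTwo() {
  const utils = await getWorkerUtils();
  await utils.addJob("exampleTask", { id: 1 });
  await utils.addJob("exampleTask", { id: 2 }, { runAt: new Date(Date.now() + 60_000) });
}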

View File

@@ -17,6 +17,8 @@ export const requireNothing = (ctx: TRPCContext) => {
};
export const requireIsProjectAdmin = async (projectId: string, ctx: TRPCContext) => {
ctx.markAccessControlRun();
const userId = ctx.session?.user.id;
if (!userId) {
throw new TRPCError({ code: "UNAUTHORIZED" });
@@ -33,11 +35,11 @@ export const requireIsProjectAdmin = async (projectId: string, ctx: TRPCContext)
if (!isAdmin) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
ctx.markAccessControlRun();
};
export const requireCanViewProject = async (projectId: string, ctx: TRPCContext) => {
ctx.markAccessControlRun();
const userId = ctx.session?.user.id;
if (!userId) {
throw new TRPCError({ code: "UNAUTHORIZED" });
@@ -53,11 +55,11 @@ export const requireCanViewProject = async (projectId: string, ctx: TRPCContext)
if (!canView) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
ctx.markAccessControlRun();
};
export const requireCanModifyProject = async (projectId: string, ctx: TRPCContext) => {
ctx.markAccessControlRun();
const userId = ctx.session?.user.id;
if (!userId) {
throw new TRPCError({ code: "UNAUTHORIZED" });
@@ -74,11 +76,11 @@ export const requireCanModifyProject = async (projectId: string, ctx: TRPCContex
if (!canModify) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
ctx.markAccessControlRun();
};
export const requireCanViewDataset = async (datasetId: string, ctx: TRPCContext) => {
ctx.markAccessControlRun();
const dataset = await prisma.dataset.findFirst({
where: {
id: datasetId,
@@ -96,8 +98,6 @@ export const requireCanViewDataset = async (datasetId: string, ctx: TRPCContext)
if (!dataset) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
ctx.markAccessControlRun();
};
export const requireCanModifyDataset = async (datasetId: string, ctx: TRPCContext) => {
@@ -105,13 +105,10 @@ export const requireCanModifyDataset = async (datasetId: string, ctx: TRPCContex
await requireCanViewDataset(datasetId, ctx);
};
export const requireCanViewExperiment = async (experimentId: string, ctx: TRPCContext) => {
await prisma.experiment.findFirst({
where: { id: experimentId },
});
export const requireCanViewExperiment = (experimentId: string, ctx: TRPCContext): Promise<void> => {
// Right now all experiments are publicly viewable, so this is a no-op.
ctx.markAccessControlRun();
return Promise.resolve();
};
export const canModifyExperiment = async (experimentId: string, userId: string) => {
@@ -136,6 +133,8 @@ export const canModifyExperiment = async (experimentId: string, userId: string)
};
export const requireCanModifyExperiment = async (experimentId: string, ctx: TRPCContext) => {
ctx.markAccessControlRun();
const userId = ctx.session?.user.id;
if (!userId) {
throw new TRPCError({ code: "UNAUTHORIZED" });
@@ -144,6 +143,17 @@ export const requireCanModifyExperiment = async (experimentId: string, ctx: TRPC
if (!(await canModifyExperiment(experimentId, userId))) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
ctx.markAccessControlRun();
};
export const requireIsAdmin = async (ctx: TRPCContext) => {
ctx.markAccessControlRun();
const userId = ctx.session?.user.id;
if (!userId) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
if (!(await isAdmin(userId))) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
};

View File

@@ -15,8 +15,8 @@ export const useExperiments = () => {
export const useExperiment = () => {
const router = useRouter();
const experiment = api.experiments.get.useQuery(
{ id: router.query.id as string },
{ enabled: !!router.query.id },
{ slug: router.query.experimentSlug as string },
{ enabled: !!router.query.experimentSlug },
);
return experiment;

View File

@@ -141,9 +141,19 @@
"type": "object",
"properties": {
"status": {
"type": "string",
"enum": [
"ok"
"anyOf": [
{
"type": "string",
"enum": [
"ok"
]
},
{
"type": "string",
"enum": [
"error"
]
}
]
}
},

View File

@@ -13,7 +13,8 @@ from .local_testing_only_get_latest_logged_call_response_200_tags import (
from .report_json_body import ReportJsonBody
from .report_json_body_tags import ReportJsonBodyTags
from .report_response_200 import ReportResponse200
from .report_response_200_status import ReportResponse200Status
from .report_response_200_status_type_0 import ReportResponse200StatusType0
from .report_response_200_status_type_1 import ReportResponse200StatusType1
__all__ = (
"CheckCacheJsonBody",
@@ -25,5 +26,6 @@ __all__ = (
"ReportJsonBody",
"ReportJsonBodyTags",
"ReportResponse200",
"ReportResponse200Status",
"ReportResponse200StatusType0",
"ReportResponse200StatusType1",
)

View File

@@ -1,8 +1,9 @@
from typing import Any, Dict, Type, TypeVar
from typing import Any, Dict, Type, TypeVar, Union
from attrs import define
from ..models.report_response_200_status import ReportResponse200Status
from ..models.report_response_200_status_type_0 import ReportResponse200StatusType0
from ..models.report_response_200_status_type_1 import ReportResponse200StatusType1
T = TypeVar("T", bound="ReportResponse200")
@@ -11,13 +12,19 @@ T = TypeVar("T", bound="ReportResponse200")
class ReportResponse200:
"""
Attributes:
status (ReportResponse200Status):
status (Union[ReportResponse200StatusType0, ReportResponse200StatusType1]):
"""
status: ReportResponse200Status
status: Union[ReportResponse200StatusType0, ReportResponse200StatusType1]
def to_dict(self) -> Dict[str, Any]:
status = self.status.value
status: str
if isinstance(self.status, ReportResponse200StatusType0):
status = self.status.value
else:
status = self.status.value
field_dict: Dict[str, Any] = {}
field_dict.update(
@@ -31,7 +38,23 @@ class ReportResponse200:
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
status = ReportResponse200Status(d.pop("status"))
def _parse_status(data: object) -> Union[ReportResponse200StatusType0, ReportResponse200StatusType1]:
try:
if not isinstance(data, str):
raise TypeError()
status_type_0 = ReportResponse200StatusType0(data)
return status_type_0
except: # noqa: E722
pass
if not isinstance(data, str):
raise TypeError()
status_type_1 = ReportResponse200StatusType1(data)
return status_type_1
status = _parse_status(d.pop("status"))
report_response_200 = cls(
status=status,

View File

@@ -1,7 +1,7 @@
from enum import Enum
class ReportResponse200Status(str, Enum):
class ReportResponse200StatusType0(str, Enum):
OK = "ok"
def __str__(self) -> str:

View File

@@ -0,0 +1,8 @@
from enum import Enum
class ReportResponse200StatusType1(str, Enum):
ERROR = "error"
def __str__(self) -> str:
return str(self.value)

View File

@@ -24,10 +24,18 @@ def _get_tags(openpipe_options):
return ReportJsonBodyTags.from_dict(tags)
def _should_check_cache(openpipe_options):
def _should_check_cache(openpipe_options, req_payload):
if configured_client.token == "":
return False
return openpipe_options.get("cache", False)
cache_requested = openpipe_options.get("cache", False)
streaming = req_payload.get("stream", False)
if cache_requested and streaming:
print(
"Caching is not yet supported for streaming requests. Ignoring cache flag. Vote for this feature at https://github.com/OpenPipe/OpenPipe/issues/159"
)
return False
return cache_requested
def _process_cache_payload(
@@ -44,7 +52,7 @@ def maybe_check_cache(
openpipe_options={},
req_payload={},
):
if not _should_check_cache(openpipe_options):
if not _should_check_cache(openpipe_options, req_payload):
return None
try:
payload = check_cache.sync(
@@ -68,7 +76,7 @@ async def maybe_check_cache_async(
openpipe_options={},
req_payload={},
):
if not _should_check_cache(openpipe_options):
if not _should_check_cache(openpipe_options, req_payload):
return None
try:

View File

@@ -13,15 +13,17 @@
"author": "",
"license": "Apache-2.0",
"dependencies": {
"encoding": "^0.1.13",
"form-data": "^4.0.0",
"lodash-es": "^4.17.21",
"node-fetch": "^3.3.2",
"node-fetch": "^2.6.12",
"openai-beta": "npm:openai@4.0.0-beta.7",
"openai-legacy": "npm:openai@3.3.0"
},
"devDependencies": {
"@types/lodash-es": "^4.17.8",
"@types/node": "^20.4.8",
"@types/node-fetch": "^2.6.4",
"dotenv": "^16.3.1",
"tsx": "^3.12.7",
"typescript": "^5.0.4",

View File

@@ -2,301 +2,283 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import FormData from "form-data";
import fetch, { Headers } from "node-fetch";
import type { RequestInit, Response } from "node-fetch";
import FormData from 'form-data';
import fetch, { Headers } from 'node-fetch';
import type { RequestInit, Response } from 'node-fetch';
import type { AbortSignal } from 'node-fetch/externals';
// @ts-expect-error TODO maybe I need an older node-fetch or something?
import type { AbortSignal } from "node-fetch/externals";
import { ApiError } from './ApiError';
import type { ApiRequestOptions } from './ApiRequestOptions';
import type { ApiResult } from './ApiResult';
import { CancelablePromise } from './CancelablePromise';
import type { OnCancel } from './CancelablePromise';
import type { OpenAPIConfig } from './OpenAPI';
import { ApiError } from "./ApiError";
import type { ApiRequestOptions } from "./ApiRequestOptions";
import type { ApiResult } from "./ApiResult";
import { CancelablePromise } from "./CancelablePromise";
import type { OnCancel } from "./CancelablePromise";
import type { OpenAPIConfig } from "./OpenAPI";
export const isDefined = <T>(
value: T | null | undefined
): value is Exclude<T, null | undefined> => {
return value !== undefined && value !== null;
export const isDefined = <T>(value: T | null | undefined): value is Exclude<T, null | undefined> => {
return value !== undefined && value !== null;
};
export const isString = (value: any): value is string => {
return typeof value === "string";
return typeof value === 'string';
};
export const isStringWithValue = (value: any): value is string => {
return isString(value) && value !== "";
return isString(value) && value !== '';
};
export const isBlob = (value: any): value is Blob => {
return (
typeof value === "object" &&
typeof value.type === "string" &&
typeof value.stream === "function" &&
typeof value.arrayBuffer === "function" &&
typeof value.constructor === "function" &&
typeof value.constructor.name === "string" &&
/^(Blob|File)$/.test(value.constructor.name) &&
/^(Blob|File)$/.test(value[Symbol.toStringTag])
);
return (
typeof value === 'object' &&
typeof value.type === 'string' &&
typeof value.stream === 'function' &&
typeof value.arrayBuffer === 'function' &&
typeof value.constructor === 'function' &&
typeof value.constructor.name === 'string' &&
/^(Blob|File)$/.test(value.constructor.name) &&
/^(Blob|File)$/.test(value[Symbol.toStringTag])
);
};
export const isFormData = (value: any): value is FormData => {
return value instanceof FormData;
return value instanceof FormData;
};
export const base64 = (str: string): string => {
try {
return btoa(str);
} catch (err) {
// @ts-ignore
return Buffer.from(str).toString("base64");
}
try {
return btoa(str);
} catch (err) {
// @ts-ignore
return Buffer.from(str).toString('base64');
}
};
export const getQueryString = (params: Record<string, any>): string => {
const qs: string[] = [];
const qs: string[] = [];
const append = (key: string, value: any) => {
qs.push(`${encodeURIComponent(key)}=${encodeURIComponent(String(value))}`);
};
const append = (key: string, value: any) => {
qs.push(`${encodeURIComponent(key)}=${encodeURIComponent(String(value))}`);
};
const process = (key: string, value: any) => {
if (isDefined(value)) {
if (Array.isArray(value)) {
value.forEach((v) => {
process(key, v);
});
} else if (typeof value === "object") {
Object.entries(value).forEach(([k, v]) => {
process(`${key}[${k}]`, v);
});
} else {
append(key, value);
}
const process = (key: string, value: any) => {
if (isDefined(value)) {
if (Array.isArray(value)) {
value.forEach(v => {
process(key, v);
});
} else if (typeof value === 'object') {
Object.entries(value).forEach(([k, v]) => {
process(`${key}[${k}]`, v);
});
} else {
append(key, value);
}
}
};
Object.entries(params).forEach(([key, value]) => {
process(key, value);
});
if (qs.length > 0) {
return `?${qs.join('&')}`;
}
};
Object.entries(params).forEach(([key, value]) => {
process(key, value);
});
if (qs.length > 0) {
return `?${qs.join("&")}`;
}
return "";
return '';
};
const getUrl = (config: OpenAPIConfig, options: ApiRequestOptions): string => {
const encoder = config.ENCODE_PATH || encodeURI;
const encoder = config.ENCODE_PATH || encodeURI;
const path = options.url
.replace("{api-version}", config.VERSION)
.replace(/{(.*?)}/g, (substring: string, group: string) => {
if (options.path?.hasOwnProperty(group)) {
return encoder(String(options.path[group]));
}
return substring;
});
const path = options.url
.replace('{api-version}', config.VERSION)
.replace(/{(.*?)}/g, (substring: string, group: string) => {
if (options.path?.hasOwnProperty(group)) {
return encoder(String(options.path[group]));
}
return substring;
});
const url = `${config.BASE}${path}`;
if (options.query) {
return `${url}${getQueryString(options.query)}`;
}
return url;
const url = `${config.BASE}${path}`;
if (options.query) {
return `${url}${getQueryString(options.query)}`;
}
return url;
};
export const getFormData = (options: ApiRequestOptions): FormData | undefined => {
if (options.formData) {
const formData = new FormData();
if (options.formData) {
const formData = new FormData();
const process = (key: string, value: any) => {
if (isString(value) || isBlob(value)) {
formData.append(key, value);
} else {
formData.append(key, JSON.stringify(value));
}
};
const process = (key: string, value: any) => {
if (isString(value) || isBlob(value)) {
formData.append(key, value);
} else {
formData.append(key, JSON.stringify(value));
}
};
Object.entries(options.formData)
.filter(([_, value]) => isDefined(value))
.forEach(([key, value]) => {
if (Array.isArray(value)) {
value.forEach((v) => process(key, v));
} else {
process(key, value);
}
});
Object.entries(options.formData)
.filter(([_, value]) => isDefined(value))
.forEach(([key, value]) => {
if (Array.isArray(value)) {
value.forEach(v => process(key, v));
} else {
process(key, value);
}
});
return formData;
}
return undefined;
return formData;
}
return undefined;
};
type Resolver<T> = (options: ApiRequestOptions) => Promise<T>;
export const resolve = async <T>(
options: ApiRequestOptions,
resolver?: T | Resolver<T>
): Promise<T | undefined> => {
if (typeof resolver === "function") {
return (resolver as Resolver<T>)(options);
}
return resolver;
export const resolve = async <T>(options: ApiRequestOptions, resolver?: T | Resolver<T>): Promise<T | undefined> => {
if (typeof resolver === 'function') {
return (resolver as Resolver<T>)(options);
}
return resolver;
};
export const getHeaders = async (
config: OpenAPIConfig,
options: ApiRequestOptions
): Promise<Headers> => {
const token = await resolve(options, config.TOKEN);
const username = await resolve(options, config.USERNAME);
const password = await resolve(options, config.PASSWORD);
const additionalHeaders = await resolve(options, config.HEADERS);
export const getHeaders = async (config: OpenAPIConfig, options: ApiRequestOptions): Promise<Headers> => {
const token = await resolve(options, config.TOKEN);
const username = await resolve(options, config.USERNAME);
const password = await resolve(options, config.PASSWORD);
const additionalHeaders = await resolve(options, config.HEADERS);
const headers = Object.entries({
Accept: "application/json",
...additionalHeaders,
...options.headers,
})
.filter(([_, value]) => isDefined(value))
.reduce(
(headers, [key, value]) => ({
...headers,
[key]: String(value),
}),
{} as Record<string, string>
);
const headers = Object.entries({
Accept: 'application/json',
...additionalHeaders,
...options.headers,
})
.filter(([_, value]) => isDefined(value))
.reduce((headers, [key, value]) => ({
...headers,
[key]: String(value),
}), {} as Record<string, string>);
if (isStringWithValue(token)) {
headers["Authorization"] = `Bearer ${token}`;
}
if (isStringWithValue(username) && isStringWithValue(password)) {
const credentials = base64(`${username}:${password}`);
headers["Authorization"] = `Basic ${credentials}`;
}
if (options.body) {
if (options.mediaType) {
headers["Content-Type"] = options.mediaType;
} else if (isBlob(options.body)) {
headers["Content-Type"] = "application/octet-stream";
} else if (isString(options.body)) {
headers["Content-Type"] = "text/plain";
} else if (!isFormData(options.body)) {
headers["Content-Type"] = "application/json";
if (isStringWithValue(token)) {
headers['Authorization'] = `Bearer ${token}`;
}
}
return new Headers(headers);
if (isStringWithValue(username) && isStringWithValue(password)) {
const credentials = base64(`${username}:${password}`);
headers['Authorization'] = `Basic ${credentials}`;
}
if (options.body) {
if (options.mediaType) {
headers['Content-Type'] = options.mediaType;
} else if (isBlob(options.body)) {
headers['Content-Type'] = 'application/octet-stream';
} else if (isString(options.body)) {
headers['Content-Type'] = 'text/plain';
} else if (!isFormData(options.body)) {
headers['Content-Type'] = 'application/json';
}
}
return new Headers(headers);
};
export const getRequestBody = (options: ApiRequestOptions): any => {
if (options.body !== undefined) {
if (options.mediaType?.includes("/json")) {
return JSON.stringify(options.body);
} else if (isString(options.body) || isBlob(options.body) || isFormData(options.body)) {
return options.body as any;
} else {
return JSON.stringify(options.body);
if (options.body !== undefined) {
if (options.mediaType?.includes('/json')) {
return JSON.stringify(options.body)
} else if (isString(options.body) || isBlob(options.body) || isFormData(options.body)) {
return options.body as any;
} else {
return JSON.stringify(options.body);
}
}
}
return undefined;
return undefined;
};
export const sendRequest = async (
options: ApiRequestOptions,
url: string,
body: any,
formData: FormData | undefined,
headers: Headers,
onCancel: OnCancel
options: ApiRequestOptions,
url: string,
body: any,
formData: FormData | undefined,
headers: Headers,
onCancel: OnCancel
): Promise<Response> => {
const controller = new AbortController();
const controller = new AbortController();
const request: RequestInit = {
headers,
method: options.method,
body: body ?? formData,
signal: controller.signal as AbortSignal,
};
const request: RequestInit = {
headers,
method: options.method,
body: body ?? formData,
signal: controller.signal as AbortSignal,
};
onCancel(() => controller.abort());
onCancel(() => controller.abort());
return await fetch(url, request);
return await fetch(url, request);
};
export const getResponseHeader = (
response: Response,
responseHeader?: string
): string | undefined => {
if (responseHeader) {
const content = response.headers.get(responseHeader);
if (isString(content)) {
return content;
export const getResponseHeader = (response: Response, responseHeader?: string): string | undefined => {
if (responseHeader) {
const content = response.headers.get(responseHeader);
if (isString(content)) {
return content;
}
}
}
return undefined;
return undefined;
};
export const getResponseBody = async (response: Response): Promise<any> => {
if (response.status !== 204) {
try {
const contentType = response.headers.get("Content-Type");
if (contentType) {
const jsonTypes = ["application/json", "application/problem+json"];
const isJSON = jsonTypes.some((type) => contentType.toLowerCase().startsWith(type));
if (isJSON) {
return await response.json();
} else {
return await response.text();
if (response.status !== 204) {
try {
const contentType = response.headers.get('Content-Type');
if (contentType) {
const jsonTypes = ['application/json', 'application/problem+json']
const isJSON = jsonTypes.some(type => contentType.toLowerCase().startsWith(type));
if (isJSON) {
return await response.json();
} else {
return await response.text();
}
}
} catch (error) {
console.error(error);
}
}
} catch (error) {
console.error(error);
}
}
return undefined;
return undefined;
};
export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult): void => {
const errors: Record<number, string> = {
400: "Bad Request",
401: "Unauthorized",
403: "Forbidden",
404: "Not Found",
500: "Internal Server Error",
502: "Bad Gateway",
503: "Service Unavailable",
...options.errors,
};
const errors: Record<number, string> = {
400: 'Bad Request',
401: 'Unauthorized',
403: 'Forbidden',
404: 'Not Found',
500: 'Internal Server Error',
502: 'Bad Gateway',
503: 'Service Unavailable',
...options.errors,
}
const error = errors[result.status];
if (error) {
throw new ApiError(options, result, error);
}
const error = errors[result.status];
if (error) {
throw new ApiError(options, result, error);
}
if (!result.ok) {
const errorStatus = result.status ?? "unknown";
const errorStatusText = result.statusText ?? "unknown";
const errorBody = (() => {
try {
return JSON.stringify(result.body, null, 2);
} catch (e) {
return undefined;
}
})();
if (!result.ok) {
const errorStatus = result.status ?? 'unknown';
const errorStatusText = result.statusText ?? 'unknown';
const errorBody = (() => {
try {
return JSON.stringify(result.body, null, 2);
} catch (e) {
return undefined;
}
})();
throw new ApiError(
options,
result,
`Generic Error: status: ${errorStatus}; status text: ${errorStatusText}; body: ${errorBody}`
);
}
throw new ApiError(options, result,
`Generic Error: status: ${errorStatus}; status text: ${errorStatusText}; body: ${errorBody}`
);
}
};
/**
@@ -306,36 +288,33 @@ export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult):
* @returns CancelablePromise<T>
* @throws ApiError
*/
export const request = <T>(
config: OpenAPIConfig,
options: ApiRequestOptions
): CancelablePromise<T> => {
return new CancelablePromise(async (resolve, reject, onCancel) => {
try {
const url = getUrl(config, options);
const formData = getFormData(options);
const body = getRequestBody(options);
const headers = await getHeaders(config, options);
export const request = <T>(config: OpenAPIConfig, options: ApiRequestOptions): CancelablePromise<T> => {
return new CancelablePromise(async (resolve, reject, onCancel) => {
try {
const url = getUrl(config, options);
const formData = getFormData(options);
const body = getRequestBody(options);
const headers = await getHeaders(config, options);
if (!onCancel.isCancelled) {
const response = await sendRequest(options, url, body, formData, headers, onCancel);
const responseBody = await getResponseBody(response);
const responseHeader = getResponseHeader(response, options.responseHeader);
if (!onCancel.isCancelled) {
const response = await sendRequest(options, url, body, formData, headers, onCancel);
const responseBody = await getResponseBody(response);
const responseHeader = getResponseHeader(response, options.responseHeader);
const result: ApiResult = {
url,
ok: response.ok,
status: response.status,
statusText: response.statusText,
body: responseHeader ?? responseBody,
};
const result: ApiResult = {
url,
ok: response.ok,
status: response.status,
statusText: response.statusText,
body: responseHeader ?? responseBody,
};
catchErrorCodes(options, result);
catchErrorCodes(options, result);
resolve(result.body);
}
} catch (error) {
reject(error);
}
});
resolve(result.body);
}
} catch (error) {
reject(error);
}
});
};
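
Behaviorally the regenerated core is unchanged: request() builds the URL, form data, body, and headers, sends the fetch, parses the response, and routes failures through catchErrorCodes. A hypothetical call through it (the /report path and payload fields are illustrative, and the partial config cast is only for the sketch):

import { request } from "./request";
import type { OpenAPIConfig } from "./OpenAPI";

const config = {
  BASE: "https://app.openpipe.ai/api/v1",
  TOKEN: process.env.OPENPIPE_API_KEY,
} as OpenAPIConfig; // partial config, cast for illustration only

const result = await request<{ status: "ok" | "error" }>(config, {
  method: "POST",
  url: "/report", // assumed endpoint path
  body: { requestedAt: Date.now() }, // assumed payload shape
  mediaType: "application/json",
});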

View File

@@ -82,7 +82,7 @@ export class DefaultService {
tags?: Record<string, string>;
},
): CancelablePromise<{
status: 'ok';
status: ('ok' | 'error');
}> {
return this.httpRequest.request({
method: 'POST',

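Because report can now resolve to "error" as well as "ok", consumers of the generated client need to narrow the union before acting on it. A hypothetical consumer-side check (the payload passed to report is abbreviated, not a real call):

const res = await opClient.default.report({ requestedAt: Date.now() }); // assumed payload
if (res.status === "error") {
  console.warn("OpenPipe did not record this call");
}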
View File

@@ -2,10 +2,13 @@ import dotenv from "dotenv";
import { expect, test } from "vitest";
import OpenAI from ".";
import {
ChatCompletion,
CompletionCreateParams,
CreateChatCompletionRequestMessage,
} from "openai-beta/resources/chat/completions";
import { OPClient } from "../codegen";
import mergeChunks from "./mergeChunks";
import assert from "assert";
dotenv.config({ path: "../.env" });
@@ -31,9 +34,7 @@ test("basic call", async () => {
};
const completion = await oaiClient.chat.completions.create({
...payload,
openpipe: {
tags: { promptId: "test" },
},
openpipe: { tags: { promptId: "test" } },
});
await completion.openpipe.reportingFinished;
const lastLogged = await lastLoggedCall();
@@ -46,29 +47,32 @@ const randomString = (length: number) => {
const characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
return Array.from(
{ length },
() => characters[Math.floor(Math.random() * characters.length)]
() => characters[Math.floor(Math.random() * characters.length)],
).join("");
};
test.skip("streaming", async () => {
test("streaming", async () => {
const completion = await oaiClient.chat.completions.create({
model: "gpt-3.5-turbo",
messages: [{ role: "system", content: "count to 4" }],
messages: [{ role: "system", content: "count to 3" }],
stream: true,
});
let merged = null;
let merged: ChatCompletion | null = null;
for await (const chunk of completion) {
merged = merge_openai_chunks(merged, chunk);
merged = mergeChunks(merged, chunk);
}
const lastLogged = await lastLoggedCall();
expect(lastLogged?.modelResponse?.respPayload.choices[0].message.content).toBe(
merged.choices[0].message.content
);
await completion.openpipe.reportingFinished;
expect(merged).toMatchObject(lastLogged?.modelResponse?.respPayload);
expect(lastLogged?.modelResponse?.reqPayload.messages).toMatchObject([
{ role: "system", content: "count to 3" },
]);
});
test.skip("bad call streaming", async () => {
test("bad call streaming", async () => {
try {
await oaiClient.chat.completions.create({
model: "gpt-3.5-turbo-blaster",
@@ -76,26 +80,29 @@ test.skip("bad call streaming", async () => {
stream: true,
});
} catch (e) {
await e.openpipe.reportingFinished;
const lastLogged = await lastLoggedCall();
expect(lastLogged?.modelResponse?.errorMessage).toBe(
"The model `gpt-3.5-turbo-blaster` does not exist"
expect(lastLogged?.modelResponse?.errorMessage).toEqual(
"The model `gpt-3.5-turbo-blaster` does not exist",
);
expect(lastLogged?.modelResponse?.statusCode).toBe(404);
expect(lastLogged?.modelResponse?.statusCode).toEqual(404);
}
});
test("bad call", async () => {
try {
await oaiClient.chat.completions.create({
model: "gpt-3.5-turbo-booster",
model: "gpt-3.5-turbo-buster",
messages: [{ role: "system", content: "count to 10" }],
});
} catch (e) {
assert("openpipe" in e);
await e.openpipe.reportingFinished;
const lastLogged = await lastLoggedCall();
expect(lastLogged?.modelResponse?.errorMessage).toBe(
"The model `gpt-3.5-turbo-booster` does not exist"
expect(lastLogged?.modelResponse?.errorMessage).toEqual(
"The model `gpt-3.5-turbo-buster` does not exist",
);
expect(lastLogged?.modelResponse?.statusCode).toBe(404);
expect(lastLogged?.modelResponse?.statusCode).toEqual(404);
}
});
@@ -109,12 +116,12 @@ test("caching", async () => {
messages: [message],
openpipe: { cache: true },
});
expect(completion.openpipe.cacheStatus).toBe("MISS");
expect(completion.openpipe.cacheStatus).toEqual("MISS");
await completion.openpipe.reportingFinished;
const firstLogged = await lastLoggedCall();
expect(completion.choices[0].message.content).toBe(
firstLogged?.modelResponse?.respPayload.choices[0].message.content
expect(completion.choices[0].message.content).toEqual(
firstLogged?.modelResponse?.respPayload.choices[0].message.content,
);
const completion2 = await oaiClient.chat.completions.create({
@@ -122,5 +129,5 @@ test("caching", async () => {
messages: [message],
openpipe: { cache: true },
});
expect(completion2.openpipe.cacheStatus).toBe("HIT");
expect(completion2.openpipe.cacheStatus).toEqual("HIT");
});
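
The streaming test folds chunks with mergeChunks until the full completion is reassembled, then compares it against what the server logged. A rough, self-contained sketch of what such a chunk-merging helper does (an assumed implementation with minimal structural types; the SDK's real mergeChunks may differ):

type Chunk = {
  id: string;
  created: number;
  model: string;
  choices: {
    index: number;
    delta: { role?: string; content?: string | null };
    finish_reason: string | null;
  }[];
};

type Completion = {
  id: string;
  object: string;
  created: number;
  model: string;
  choices: {
    index: number;
    message: { role: string; content: string };
    finish_reason: string | null;
  }[];
};

function mergeChunksSketch(base: Completion | null, chunk: Chunk): Completion {
  // First chunk: copy the metadata and start with no choices.
  const merged: Completion = base ?? {
    id: chunk.id,
    object: "chat.completion",
    created: chunk.created,
    model: chunk.model,
    choices: [],
  };
  for (const choice of chunk.choices) {
    const existing = merged.choices.find((c) => c.index === choice.index);
    if (existing) {
      // Append the streamed delta text onto the accumulated message.
      existing.message.content += choice.delta.content ?? "";
      existing.finish_reason = choice.finish_reason ?? existing.finish_reason;
    } else {
      merged.choices.push({
        index: choice.index,
        message: { role: choice.delta.role ?? "assistant", content: choice.delta.content ?? "" },
        finish_reason: choice.finish_reason,
      });
    }
  }
  return merged;
}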

View File

@@ -5,9 +5,9 @@ import {
ChatCompletion,
ChatCompletionChunk,
CompletionCreateParams,
Completions,
} from "openai-beta/resources/chat/completions";
import { WrappedStream } from "./streaming";
import { DefaultService, OPClient } from "../codegen";
import { Stream } from "openai-beta/streaming";
import { OpenPipeArgs, OpenPipeMeta, type OpenPipeConfig, getTags } from "../shared";
@@ -27,11 +27,11 @@ export default class OpenAI extends openai.OpenAI {
BASE:
openpipe?.baseUrl ?? readEnv("OPENPIPE_BASE_URL") ?? "https://app.openpipe.ai/api/v1",
TOKEN: openPipeApiKey,
})
}),
);
} else {
console.warn(
"You're using the OpenPipe client without an API key. No completion requests will be logged."
"You're using the OpenPipe client without an API key. No completion requests will be logged.",
);
}
}
@@ -43,10 +43,10 @@ class WrappedChat extends openai.OpenAI.Chat {
this.completions.opClient = client;
}
completions: InstrumentedCompletions = new InstrumentedCompletions(this.client);
completions: WrappedCompletions = new WrappedCompletions(this.client);
}
class InstrumentedCompletions extends openai.OpenAI.Chat.Completions {
class WrappedCompletions extends openai.OpenAI.Chat.Completions {
opClient?: OPClient;
constructor(client: openai.OpenAI, opClient?: OPClient) {
@@ -54,32 +54,35 @@ class InstrumentedCompletions extends openai.OpenAI.Chat.Completions {
this.opClient = opClient;
}
_report(args: Parameters<DefaultService["report"]>[0]) {
async _report(args: Parameters<DefaultService["report"]>[0]) {
try {
return this.opClient ? this.opClient.default.report(args) : Promise.resolve();
this.opClient ? await this.opClient.default.report(args) : Promise.resolve();
} catch (e) {
console.error(e);
return Promise.resolve();
}
}
create(
body: CompletionCreateParams.CreateChatCompletionRequestNonStreaming & OpenPipeArgs,
options?: Core.RequestOptions
options?: Core.RequestOptions,
): Promise<Core.APIResponse<ChatCompletion & { openpipe: OpenPipeMeta }>>;
create(
body: CompletionCreateParams.CreateChatCompletionRequestStreaming & OpenPipeArgs,
options?: Core.RequestOptions
): Promise<Core.APIResponse<Stream<ChatCompletionChunk>>>;
options?: Core.RequestOptions,
): Promise<Core.APIResponse<WrappedStream>>;
async create(
{ openpipe, ...body }: CompletionCreateParams & OpenPipeArgs,
options?: Core.RequestOptions
): Promise<
Core.APIResponse<(ChatCompletion & { openpipe: OpenPipeMeta }) | Stream<ChatCompletionChunk>>
> {
console.log("LALALA REPORT", this.opClient);
options?: Core.RequestOptions,
): Promise<Core.APIResponse<(ChatCompletion & { openpipe: OpenPipeMeta }) | WrappedStream>> {
const requestedAt = Date.now();
const cacheRequested = openpipe?.cache ?? false;
let reportingFinished: OpenPipeMeta["reportingFinished"] = Promise.resolve();
let cacheRequested = openpipe?.cache ?? false;
if (cacheRequested && body.stream) {
console.warn(
`Caching is not yet supported for streaming requests. Ignoring cache flag. Vote for this feature at https://github.com/OpenPipe/OpenPipe/issues/159`,
);
cacheRequested = false;
}
if (cacheRequested) {
try {
@@ -92,12 +95,13 @@ class InstrumentedCompletions extends openai.OpenAI.Chat.Completions {
.then((res) => res.respPayload);
if (cached) {
const meta = {
cacheStatus: "HIT",
reportingFinished,
};
return {
...cached,
openpipe: {
cacheStatus: "HIT",
reportingFinished: Promise.resolve(),
},
openpipe: meta,
};
}
} catch (e) {
@@ -105,15 +109,23 @@ class InstrumentedCompletions extends openai.OpenAI.Chat.Completions {
}
}
let reportingFinished: OpenPipeMeta["reportingFinished"] = Promise.resolve();
try {
if (body.stream) {
const stream = await super.create(body, options);
const wrappedStream = new WrappedStream(stream, (response) =>
this._report({
requestedAt,
receivedAt: Date.now(),
reqPayload: body,
respPayload: response,
statusCode: 200,
tags: getTags(openpipe),
}),
);
// Do some logging of each chunk here
return stream;
return wrappedStream;
} else {
const response = await super.create(body, options);
@@ -147,6 +159,16 @@ class InstrumentedCompletions extends openai.OpenAI.Chat.Completions {
tags: getTags(openpipe),
});
}
// make sure error is an object we can add properties to
if (typeof error === "object" && error !== null) {
error = {
...error,
openpipe: {
cacheStatus: cacheRequested ? "MISS" : "SKIP",
reportingFinished,
},
};
}
throw error;
}
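
Putting the wrapper together, a streaming call now returns a WrappedStream whose openpipe.reportingFinished promise resolves once the merged response has been reported. A hedged end-to-end usage sketch (the import path and constructor options are assumptions; the openpipe request options mirror the tests above):

import OpenAI from "openpipe"; // hypothetical package entry point

const client = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY, // regular OpenAI key
  openpipe: { apiKey: process.env.OPENPIPE_API_KEY }, // assumed config shape
});

const stream = await client.chat.completions.create({
  model: "gpt-3.5-turbo",
  messages: [{ role: "user", content: "count to 3" }],
  stream: true,
  openpipe: { tags: { promptId: "demo" } },
});

for await (const chunk of stream) {
  process.stdout.write(chunk.choices[0]?.delta?.content ?? "");
}

// Reporting happens in the background; await the handle before exiting.
await stream.openpipe.reportingFinished;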

View File

@@ -0,0 +1,43 @@
import { ChatCompletion, ChatCompletionChunk } from "openai-beta/resources/chat";
import { Stream } from "openai-beta/streaming";
import { OpenPipeMeta } from "../shared";
import mergeChunks from "./mergeChunks";
export class WrappedStream extends Stream<ChatCompletionChunk> {
openpipe: OpenPipeMeta;
private resolveReportingFinished: () => void = () => {};
private report: (response: unknown) => Promise<void>;
constructor(stream: Stream<ChatCompletionChunk>, report: (response: unknown) => Promise<void>) {
super(stream.response, stream.controller);
this.report = report;
const reportingFinished = new Promise<void>((resolve) => {
this.resolveReportingFinished = resolve;
});
this.openpipe = {
cacheStatus: "MISS",
reportingFinished,
};
}
async *[Symbol.asyncIterator](): AsyncIterator<ChatCompletionChunk, any, undefined> {
const iterator = super[Symbol.asyncIterator]();
let combinedResponse: ChatCompletion | null = null;
while (true) {
const result = await iterator.next();
if (result.done) break;
combinedResponse = mergeChunks(combinedResponse, result.value);
yield result.value;
}
await this.report(combinedResponse);
// Resolve the promise here
this.resolveReportingFinished();
}
}
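
The constructor above stashes the promise's resolver so reportingFinished can be settled from the async iterator once the final merged chunk has been reported. That externally-resolvable-promise pattern in isolation (a generic sketch, not code from this diff):

function deferred<T = void>() {
  let resolve!: (value: T | PromiseLike<T>) => void;
  const promise = new Promise<T>((res) => {
    resolve = res;
  });
  return { promise, resolve };
}

// Hand out `promise` (here, as openpipe.reportingFinished) and call
// `resolve()` when the background work completes.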

View File

@@ -1,4 +1,5 @@
import pkg from "../package.json";
import { DefaultService } from "./codegen";
export type OpenPipeConfig = {
apiKey?: string;
@@ -15,9 +16,11 @@ export type OpenPipeMeta = {
// We report your call to OpenPipe asynchronously in the background. If you
// need to wait until the report is sent to take further action, you can await
// this promise.
reportingFinished: Promise<void | { status: "ok" }>;
reportingFinished: Promise<void>;
};
export type ReportFn = (...args: Parameters<DefaultService["report"]>) => Promise<void>;
export const getTags = (args: OpenPipeArgs["openpipe"]): Record<string, string> => ({
...args?.tags,
...(args?.cache ? { $cache: args.cache?.toString() } : {}),
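
Given the $cache handling shown above, getTags plausibly behaves like this (a worked sketch; the hunk is truncated, so any fields appended after the cache entry are omitted here):

getTags({ tags: { promptId: "test" }, cache: true });
// => { promptId: "test", $cache: "true", ... }

getTags(undefined);
// => { ... } (no user tags, no $cache entry)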

pnpm-lock.yaml (generated, 105 changes)
View File

@@ -134,6 +134,9 @@ importers:
kysely:
specifier: ^0.26.1
version: 0.26.1
kysely-codegen:
specifier: ^0.10.1
version: 0.10.1(kysely@0.26.1)(pg@8.11.2)
lodash-es:
specifier: ^4.17.21
version: 4.17.21
@@ -163,7 +166,7 @@ importers:
version: 6.9.4
openai:
specifier: 4.0.0-beta.7
version: 4.0.0-beta.7
version: 4.0.0-beta.7(encoding@0.1.13)
openpipe:
specifier: workspace:*
version: link:../client-libs/typescript
@@ -354,6 +357,9 @@ importers:
client-libs/typescript:
dependencies:
encoding:
specifier: ^0.1.13
version: 0.1.13
form-data:
specifier: ^4.0.0
version: 4.0.0
@@ -361,11 +367,11 @@ importers:
specifier: ^4.17.21
version: 4.17.21
node-fetch:
specifier: ^3.3.2
version: 3.3.2
specifier: ^2.6.12
version: 2.6.12(encoding@0.1.13)
openai-beta:
specifier: npm:openai@4.0.0-beta.7
version: /openai@4.0.0-beta.7
version: /openai@4.0.0-beta.7(encoding@0.1.13)
openai-legacy:
specifier: npm:openai@3.3.0
version: /openai@3.3.0
@@ -376,6 +382,9 @@ importers:
'@types/node':
specifier: ^20.4.8
version: 20.4.8
'@types/node-fetch':
specifier: ^2.6.4
version: 2.6.4
dotenv:
specifier: ^16.3.1
version: 16.3.1
@@ -413,7 +422,7 @@ packages:
digest-fetch: 1.3.0
form-data-encoder: 1.7.2
formdata-node: 4.4.1
node-fetch: 2.6.12
node-fetch: 2.6.12(encoding@0.1.13)
transitivePeerDependencies:
- encoding
dev: false
@@ -2687,7 +2696,7 @@ packages:
dependencies:
https-proxy-agent: 5.0.1
mkdirp: 0.5.6
node-fetch: 2.6.12
node-fetch: 2.6.12(encoding@0.1.13)
progress: 2.0.3
proxy-from-env: 1.1.0
which: 2.0.2
@@ -3177,7 +3186,6 @@ packages:
dependencies:
'@types/node': 20.4.10
form-data: 3.0.1
dev: false
/@types/node@18.16.0:
resolution: {integrity: sha512-BsAaKhB+7X+H4GnSjGhJG9Qi8Tw+inU9nJDwmD5CgOmBLEI6ArdhikpLX7DjbjDRDTbqZzU2LSQNZg8WGPiSZQ==}
@@ -3828,7 +3836,6 @@ packages:
/asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
dev: false
/available-typed-arrays@1.0.5:
resolution: {integrity: sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==}
@@ -4219,7 +4226,6 @@ packages:
engines: {node: '>= 0.8'}
dependencies:
delayed-stream: 1.0.0
dev: false
/comma-separated-tokens@1.0.8:
resolution: {integrity: sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==}
@@ -4504,11 +4510,6 @@ packages:
assert-plus: 1.0.0
dev: false
/data-uri-to-buffer@4.0.1:
resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==}
engines: {node: '>= 12'}
dev: false
/date-fns@2.30.0:
resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==}
engines: {node: '>=0.11'}
@@ -4592,7 +4593,6 @@ packages:
/delayed-stream@1.0.0:
resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
engines: {node: '>=0.4.0'}
dev: false
/depd@1.1.2:
resolution: {integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==}
@@ -4726,6 +4726,12 @@ packages:
engines: {node: '>= 0.8'}
dev: false
/encoding@0.1.13:
resolution: {integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==}
dependencies:
iconv-lite: 0.6.3
dev: false
/engine.io-client@6.5.2:
resolution: {integrity: sha512-CQZqbrpEYnrpGqC07a9dJDz4gePZUgTPMU3NKJPSeQOyw27Tst4Pl3FemKoFGAlHzgZmKjoRmiJvbWfhCXUlIg==}
dependencies:
@@ -5396,14 +5402,6 @@ packages:
format: 0.2.2
dev: false
/fetch-blob@3.2.0:
resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==}
engines: {node: ^12.20 || >= 14.13}
dependencies:
node-domexception: 1.0.0
web-streams-polyfill: 3.2.1
dev: false
/fflate@0.4.8:
resolution: {integrity: sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==}
dev: false
@@ -5519,7 +5517,6 @@ packages:
asynckit: 0.4.0
combined-stream: 1.0.8
mime-types: 2.1.35
dev: false
/form-data@4.0.0:
resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==}
@@ -5543,13 +5540,6 @@ packages:
web-streams-polyfill: 4.0.0-beta.3
dev: false
/formdata-polyfill@4.0.10:
resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==}
engines: {node: '>=12.20.0'}
dependencies:
fetch-blob: 3.2.0
dev: false
/forwarded@0.2.0:
resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==}
engines: {node: '>= 0.6'}
@@ -5965,6 +5955,13 @@ packages:
safer-buffer: 2.1.2
dev: false
/iconv-lite@0.6.3:
resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
engines: {node: '>=0.10.0'}
dependencies:
safer-buffer: 2.1.2
dev: false
/ignore@5.2.4:
resolution: {integrity: sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==}
engines: {node: '>= 4'}
@@ -6391,6 +6388,30 @@ packages:
object.values: 1.1.6
dev: true
/kysely-codegen@0.10.1(kysely@0.26.1)(pg@8.11.2):
resolution: {integrity: sha512-8Bslh952gN5gtucRv4jTZDFD18RBioS6M50zHfe5kwb5iSyEAunU4ZYMdHzkHraa4zxjg5/183XlOryBCXLRIw==}
hasBin: true
peerDependencies:
better-sqlite3: '>=7.6.2'
kysely: '>=0.19.12'
mysql2: ^2.3.3 || ^3.0.0
pg: ^8.8.0
peerDependenciesMeta:
better-sqlite3:
optional: true
mysql2:
optional: true
pg:
optional: true
dependencies:
chalk: 4.1.2
dotenv: 16.3.1
kysely: 0.26.1
micromatch: 4.0.5
minimist: 1.2.8
pg: 8.11.2
dev: false
/kysely@0.26.1:
resolution: {integrity: sha512-FVRomkdZofBu3O8SiwAOXrwbhPZZr8mBN5ZeUWyprH29jzvy6Inzqbd0IMmGxpd4rcOCL9HyyBNWBa8FBqDAdg==}
engines: {node: '>=14.0.0'}
@@ -6611,7 +6632,6 @@ packages:
dependencies:
braces: 3.0.2
picomatch: 2.3.1
dev: true
/mime-db@1.52.0:
resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
@@ -6833,7 +6853,7 @@ packages:
engines: {node: '>=10.5.0'}
dev: false
/node-fetch@2.6.12:
/node-fetch@2.6.12(encoding@0.1.13):
resolution: {integrity: sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==}
engines: {node: 4.x || >=6.0.0}
peerDependencies:
@@ -6842,18 +6862,10 @@ packages:
encoding:
optional: true
dependencies:
encoding: 0.1.13
whatwg-url: 5.0.0
dev: false
/node-fetch@3.3.2:
resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==}
engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
dependencies:
data-uri-to-buffer: 4.0.1
fetch-blob: 3.2.0
formdata-polyfill: 4.0.10
dev: false
/node-mocks-http@1.12.2:
resolution: {integrity: sha512-xhWwC0dh35R9rf0j3bRZXuISXdHxxtMx0ywZQBwjrg3yl7KpRETzogfeCamUIjltpn0Fxvs/ZhGJul1vPLrdJQ==}
engines: {node: '>=0.6'}
@@ -7001,7 +7013,7 @@ packages:
- debug
dev: false
/openai@4.0.0-beta.7:
/openai@4.0.0-beta.7(encoding@0.1.13):
resolution: {integrity: sha512-jHjwvpMuGkNxiQ3erwLZsOvPEhcVrMtwtfNeYmGCjhbdB+oStVw/7pIhIPkualu8rlhLwgMR7awknIaN3IQcOA==}
dependencies:
'@types/node': 18.16.0
@@ -7011,7 +7023,7 @@ packages:
digest-fetch: 1.3.0
form-data-encoder: 1.7.2
formdata-node: 4.4.1
node-fetch: 2.6.12
node-fetch: 2.6.12(encoding@0.1.13)
transitivePeerDependencies:
- encoding
dev: false
@@ -9111,11 +9123,6 @@ packages:
glob-to-regexp: 0.4.1
graceful-fs: 4.2.11
/web-streams-polyfill@3.2.1:
resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==}
engines: {node: '>= 8'}
dev: false
/web-streams-polyfill@4.0.0-beta.3:
resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==}
engines: {node: '>= 14'}