Compare commits
3 Commits
job-dedupe ... empty-scen

| Author | SHA1 | Date |
|---|---|---|
|  | 03a8d094fc |  |
|  | 2b990622f5 |  |
|  | d079eba557 |  |
@@ -10,4 +10,6 @@ pnpm tsx src/promptConstructor/migrate.ts
 
 echo "Starting the server"
 
-pnpm start
+pnpm concurrently --kill-others \
+  "pnpm start" \
+  "pnpm tsx src/server/tasks/worker.ts"
@@ -8,7 +8,7 @@ import {
   useHandledAsyncCallback,
   useVisibleScenarioIds,
 } from "~/utils/hooks";
-import { cellPadding } from "./constants";
+import { cellPadding } from "../constants";
 import { ActionButton } from "./ScenariosHeader";
 
 export default function AddVariantButton() {
@@ -16,7 +16,7 @@ import {
   VStack,
 } from "@chakra-ui/react";
 import { BsArrowsAngleExpand, BsX } from "react-icons/bs";
-import { cellPadding } from "./constants";
+import { cellPadding } from "../constants";
 import { FloatingLabelInput } from "./FloatingLabelInput";
 import { ScenarioEditorModal } from "./ScenarioEditorModal";
 
@@ -11,7 +11,7 @@ import {
   IconButton,
   Spinner,
 } from "@chakra-ui/react";
-import { cellPadding } from "./constants";
+import { cellPadding } from "../constants";
 import {
   useExperiment,
   useExperimentAccess,
@@ -110,7 +110,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
     setIsChanged(false);
 
     await utils.promptVariants.list.invalidate();
-  }, [checkForChanges, replaceVariant.mutateAsync]);
+  }, [checkForChanges]);
 
   useEffect(() => {
     if (monaco) {
@@ -1,6 +1,6 @@
 import { HStack, Icon, Text, useToken } from "@chakra-ui/react";
 import { type PromptVariant } from "./types";
-import { cellPadding } from "./constants";
+import { cellPadding } from "../constants";
 import { api } from "~/utils/api";
 import chroma from "chroma-js";
 import { BsCurrencyDollar } from "react-icons/bs";
@@ -3,14 +3,13 @@ import { api } from "~/utils/api";
 import AddVariantButton from "./AddVariantButton";
 import ScenarioRow from "./ScenarioRow";
 import VariantEditor from "./VariantEditor";
-import VariantHeader from "./VariantHeader/VariantHeader";
+import VariantHeader from "../VariantHeader/VariantHeader";
 import VariantStats from "./VariantStats";
 import { ScenariosHeader } from "./ScenariosHeader";
 import { borders } from "./styles";
 import { useScenarios } from "~/utils/hooks";
 import ScenarioPaginator from "./ScenarioPaginator";
 import { Fragment } from "react";
-import useScrolledPast from "./useHasScrolledPast";
 
 export default function OutputsTable({ experimentId }: { experimentId: string | undefined }) {
   const variants = api.promptVariants.list.useQuery(
@@ -19,7 +18,6 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
   );
 
   const scenarios = useScenarios();
-  const shouldFlattenHeader = useScrolledPast(50);
 
   if (!variants.data || !scenarios.data) return null;
 
@@ -65,8 +63,8 @@ export default function OutputsTable({ experimentId }: { experimentId: string |
             variant={variant}
             canHide={variants.data.length > 1}
             rowStart={1}
-            borderTopLeftRadius={isFirst && !shouldFlattenHeader ? 8 : 0}
-            borderTopRightRadius={isLast && !shouldFlattenHeader ? 8 : 0}
+            borderTopLeftRadius={isFirst ? 8 : 0}
+            borderTopRightRadius={isLast ? 8 : 0}
             {...sharedProps}
           />
           <GridItem rowStart={2} {...sharedProps}>
@@ -1,34 +0,0 @@
-import { useState, useEffect } from "react";
-
-const useScrolledPast = (scrollThreshold: number) => {
-  const [hasScrolledPast, setHasScrolledPast] = useState(true);
-
-  useEffect(() => {
-    const container = document.getElementById("output-container");
-
-    if (!container) {
-      console.warn('Element with id "outputs-container" not found.');
-      return;
-    }
-
-    const checkScroll = () => {
-      const { scrollTop } = container;
-
-      // Check if scrollTop is greater than or equal to scrollThreshold
-      setHasScrolledPast(scrollTop > scrollThreshold);
-    };
-
-    checkScroll();
-
-    container.addEventListener("scroll", checkScroll);
-
-    // Cleanup
-    return () => {
-      container.removeEventListener("scroll", checkScroll);
-    };
-  }, []);
-
-  return hasScrolledPast;
-};
-
-export default useScrolledPast;
@@ -1,11 +1,11 @@
 import { useState, type DragEvent } from "react";
-import { type PromptVariant } from "../types";
+import { type PromptVariant } from "../OutputsTable/types";
 import { api } from "~/utils/api";
 import { RiDraggable } from "react-icons/ri";
 import { useExperimentAccess, useHandledAsyncCallback } from "~/utils/hooks";
 import { HStack, Icon, Text, GridItem, type GridItemProps } from "@chakra-ui/react"; // Changed here
 import { cellPadding, headerMinHeight } from "../constants";
-import AutoResizeTextArea from "../../AutoResizeTextArea";
+import AutoResizeTextArea from "../AutoResizeTextArea";
 import VariantHeaderMenuButton from "./VariantHeaderMenuButton";
 
 export default function VariantHeader(
@@ -75,7 +75,7 @@ export default function VariantHeader(
       padding={0}
       sx={{
         position: "sticky",
-        top: "0",
+        top: "-2",
         // Ensure that the menu always appears above the sticky header of other variants
         zIndex: menuOpen ? "dropdown" : 10,
       }}
@@ -1,4 +1,6 @@
-import { useState } from "react";
+import { type PromptVariant } from "../OutputsTable/types";
+import { api } from "~/utils/api";
+import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
 import {
   Icon,
   Menu,
@@ -12,13 +14,10 @@ import {
 } from "@chakra-ui/react";
 import { BsFillTrashFill, BsGear, BsStars } from "react-icons/bs";
 import { FaRegClone } from "react-icons/fa";
+import { useState } from "react";
+import { RefinePromptModal } from "../RefinePromptModal/RefinePromptModal";
 import { RiExchangeFundsFill } from "react-icons/ri";
+import { ChangeModelModal } from "../ChangeModelModal/ChangeModelModal";
-import { api } from "~/utils/api";
-import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
-import { type PromptVariant } from "../types";
-import { RefinePromptModal } from "../../RefinePromptModal/RefinePromptModal";
-import { ChangeModelModal } from "../../ChangeModelModal/ChangeModelModal";
 
 export default function VariantHeaderMenuButton({
   variant,
@@ -67,13 +67,7 @@ export default function ProjectMenu() {
   );
 
   return (
-    <VStack
-      w="full"
-      alignItems="flex-start"
-      spacing={0}
-      py={1}
-      zIndex={popover.isOpen ? "dropdown" : undefined}
-    >
+    <VStack w="full" alignItems="flex-start" spacing={0} py={1}>
       <Popover
         placement="bottom"
         isOpen={popover.isOpen}
@@ -26,10 +26,6 @@ export const env = createEnv({
     SMTP_PORT: z.string().default("placeholder"),
     SMTP_LOGIN: z.string().default("placeholder"),
     SMTP_PASSWORD: z.string().default("placeholder"),
-    WORKER_CONCURRENCY: z
-      .string()
-      .default("10")
-      .transform((val) => parseInt(val)),
   },
 
   /**
@@ -72,7 +68,6 @@ export const env = createEnv({
     SMTP_PORT: process.env.SMTP_PORT,
     SMTP_LOGIN: process.env.SMTP_LOGIN,
     SMTP_PASSWORD: process.env.SMTP_PASSWORD,
-    WORKER_CONCURRENCY: process.env.WORKER_CONCURRENCY,
   },
   /**
   * Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation.
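These env.mjs hunks remove the configurable WORKER_CONCURRENCY setting; the worker file further down hard-codes `concurrency: 10` instead. As a minimal sketch of what the removed schema did (assuming only that `zod` is available): environment variables arrive as strings, so the value was defaulted and then coerced to a number.

```ts
import { z } from "zod";

// Sketch of the removed WORKER_CONCURRENCY schema: default to "10",
// then convert the string env var into a number.
const workerConcurrency = z
  .string()
  .default("10")
  .transform((val) => parseInt(val, 10));

console.log(workerConcurrency.parse(undefined)); // 10
console.log(workerConcurrency.parse("25")); // 25
```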
@@ -7,7 +7,6 @@ import {
   // templateSystemUserAssistantPrompt,
   templateInstructionInputResponsePrompt,
   templateAiroborosPrompt,
-  templateGryphePrompt,
   templateVicunaPrompt,
 } from "./templatePrompt";
 
@@ -70,15 +69,6 @@ const frontendModelProvider: FrontendModelProvider<SupportedModel, OpenpipeChatO
     learnMoreUrl: "https://huggingface.co/lmsys/vicuna-13b-v1.5",
     templatePrompt: templateVicunaPrompt,
   },
-  "Gryphe/MythoMax-L2-13b": {
-    name: "MythoMax-L2-13b",
-    contextWindow: 4096,
-    pricePerSecond: 0.0003,
-    speed: "medium",
-    provider: "openpipe/Chat",
-    learnMoreUrl: "https://huggingface.co/Gryphe/MythoMax-L2-13b",
-    templatePrompt: templateGryphePrompt,
-  },
   "NousResearch/Nous-Hermes-llama-2-7b": {
     name: "Nous-Hermes-llama-2-7b",
     contextWindow: 4096,
@@ -13,7 +13,6 @@ const modelEndpoints: Record<OpenpipeChatInput["model"], string> = {
   "NousResearch/Nous-Hermes-Llama2-13b": "https://ncv8pw3u0vb8j2-8000.proxy.runpod.net/v1",
   "jondurbin/airoboros-l2-13b-gpt4-2.0": "https://9nrbx7oph4btou-8000.proxy.runpod.net/v1",
   "lmsys/vicuna-13b-v1.5": "https://h88hkt3ux73rb7-8000.proxy.runpod.net/v1",
-  "Gryphe/MythoMax-L2-13b": "https://3l5jvhnxdgky3v-8000.proxy.runpod.net/v1",
   "NousResearch/Nous-Hermes-llama-2-7b": "https://ua1bpc6kv3dgge-8000.proxy.runpod.net/v1",
 };
 
@@ -11,7 +11,6 @@ const supportedModels = [
   "NousResearch/Nous-Hermes-Llama2-13b",
   "jondurbin/airoboros-l2-13b-gpt4-2.0",
   "lmsys/vicuna-13b-v1.5",
-  "Gryphe/MythoMax-L2-13b",
   "NousResearch/Nous-Hermes-llama-2-7b",
 ] as const;
 
@@ -11,7 +11,6 @@
     "NousResearch/Nous-Hermes-Llama2-13b",
     "jondurbin/airoboros-l2-13b-gpt4-2.0",
     "lmsys/vicuna-13b-v1.5",
-    "Gryphe/MythoMax-L2-13b",
     "NousResearch/Nous-Hermes-llama-2-7b"
   ]
 },
@@ -223,52 +223,3 @@ export const templateVicunaPrompt = (messages: OpenpipeChatInput["messages"]) =>
 
   return prompt.trim();
 };
-
-// <System prompt/Character Card>
-
-// ### Instruction:
-// Your instruction or question here.
-// For roleplay purposes, I suggest the following - Write <CHAR NAME>'s next reply in a chat between <YOUR NAME> and <CHAR NAME>. Write a single reply only.
-
-// ### Response:
-export const templateGryphePrompt = (messages: OpenpipeChatInput["messages"]) => {
-  const splitter = "\n\n";
-
-  const instructionTag = "### Instruction:\n";
-  const responseTag = "### Response:\n";
-
-  let combinedSystemMessage = "";
-  const conversationMessages = [];
-
-  for (const message of messages) {
-    if (message.role === "system") {
-      combinedSystemMessage += message.content;
-    } else if (message.role === "user") {
-      conversationMessages.push(instructionTag + message.content);
-    } else {
-      conversationMessages.push(responseTag + message.content);
-    }
-  }
-
-  let systemMessage = "";
-
-  if (combinedSystemMessage) {
-    // If there is no user message, add a user tag to the system message
-    if (conversationMessages.find((message) => message.startsWith(instructionTag))) {
-      systemMessage = `${combinedSystemMessage}\n\n`;
-    } else {
-      conversationMessages.unshift(instructionTag + combinedSystemMessage);
-    }
-  }
-
-  let prompt = `${systemMessage}${conversationMessages.join(splitter)}`;
-
-  // Ensure that the prompt ends with an assistant message
-  const lastInstructionIndex = prompt.lastIndexOf(instructionTag);
-  const lastAssistantIndex = prompt.lastIndexOf(responseTag);
-  if (lastInstructionIndex > lastAssistantIndex) {
-    prompt += splitter + responseTag;
-  }
-
-  return prompt;
-};
@@ -124,7 +124,7 @@ export default function Experiment() {
         <ExperimentHeaderButtons />
       </PageHeaderContainer>
       <ExperimentSettingsDrawer />
-      <Box w="100%" overflowX="auto" flex={1} id="output-container">
+      <Box w="100%" overflowX="auto" flex={1}>
         <OutputsTable experimentId={experiment.data?.id} />
       </Box>
     </VStack>
13  app/src/server/api/external/v1Api.router.ts  (vendored)
@@ -66,7 +66,7 @@ export const v1ApiRouter = createOpenApiRouter({
 
       if (!existingResponse) return { respPayload: null };
 
-      const newCall = await prisma.loggedCall.create({
+      await prisma.loggedCall.create({
         data: {
           projectId: ctx.key.projectId,
           requestedAt: new Date(input.requestedAt),
@@ -75,7 +75,11 @@ export const v1ApiRouter = createOpenApiRouter({
         },
       });
 
-      await createTags(newCall.projectId, newCall.id, input.tags);
+      await createTags(
+        existingResponse.originalLoggedCall.projectId,
+        existingResponse.originalLoggedCallId,
+        input.tags,
+      );
       return {
         respPayload: existingResponse.respPayload,
       };
@@ -107,7 +111,7 @@ export const v1ApiRouter = createOpenApiRouter({
           .default({}),
       }),
     )
-    .output(z.object({ status: z.union([z.literal("ok"), z.literal("error")]) }))
+    .output(z.object({ status: z.literal("ok") }))
    .mutation(async ({ input, ctx }) => {
      const reqPayload = await reqValidator.spa(input.reqPayload);
      const respPayload = await respValidator.spa(input.respPayload);
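The `report` endpoint's output schema changes from a union of the literals "ok" and "error" to the single literal "ok"; the union is what produced the `anyOf` block removed from the generated OpenAPI JSON further below. A minimal sketch of the two shapes, assuming only `zod`:

```ts
import { z } from "zod";

// Union of literals: either value is accepted.
const statusUnion = z.object({ status: z.union([z.literal("ok"), z.literal("error")]) });
// Single literal: only "ok" is accepted.
const statusOk = z.object({ status: z.literal("ok") });

statusUnion.parse({ status: "error" }); // passes
console.log(statusOk.safeParse({ status: "error" }).success); // false
```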
@@ -208,7 +212,6 @@ export const v1ApiRouter = createOpenApiRouter({
         createdAt: true,
         cacheHit: true,
         tags: true,
-        id: true,
         modelResponse: {
           select: {
             id: true,
@@ -234,7 +237,7 @@ async function createTags(projectId: string, loggedCallId: string, tags: Record<
   const tagsToCreate = Object.entries(tags).map(([name, value]) => ({
     projectId,
     loggedCallId,
-    name: name.replaceAll(/[^a-zA-Z0-9_$.]/g, "_"),
+    name: name.replaceAll(/[^a-zA-Z0-9_$]/g, "_"),
     value,
   }));
   await prisma.loggedCallTag.createMany({
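The only difference in `createTags` is the sanitization character class: the removed pattern also allowed `.` in tag names, the new one replaces dots with underscores. A small illustration (the tag name is hypothetical, used only to show the effect):

```ts
// Hypothetical tag name, purely for illustration.
const name = "prompt.id/v2";

console.log(name.replaceAll(/[^a-zA-Z0-9_$.]/g, "_")); // "prompt.id_v2" (dot preserved)
console.log(name.replaceAll(/[^a-zA-Z0-9_$]/g, "_"));  // "prompt_id_v2" (dot replaced)
```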
@@ -178,7 +178,6 @@ export const experimentsRouter = createTRPCRouter({
         existingToNewVariantIds.set(variant.id, newVariantId);
         variantsToCreate.push({
           ...variant,
-          uiId: uuidv4(),
           id: newVariantId,
           experimentId: newExperimentId,
         });
@@ -192,7 +191,6 @@ export const experimentsRouter = createTRPCRouter({
         scenariosToCreate.push({
           ...scenario,
           id: newScenarioId,
-          uiId: uuidv4(),
           experimentId: newExperimentId,
           variableValues: scenario.variableValues as Prisma.InputJsonValue,
         });
19  app/src/server/scripts/openai-test.ts  (Normal file)
@@ -0,0 +1,19 @@
+import "dotenv/config";
+import { openai } from "../utils/openai";
+
+const resp = await openai.chat.completions.create({
+  model: "gpt-3.5-turbo-0613",
+  stream: true,
+  messages: [
+    {
+      role: "user",
+      content: "count to 20",
+    },
+  ],
+});
+
+for await (const part of resp) {
+  console.log("part", part);
+}
+
+console.log("final resp", resp);
@@ -1,4 +1,4 @@
-import { type Helpers, type Task, makeWorkerUtils, TaskSpec } from "graphile-worker";
+import { type Helpers, type Task, makeWorkerUtils } from "graphile-worker";
 import { env } from "~/env.mjs";
 
 let workerUtilsPromise: ReturnType<typeof makeWorkerUtils> | null = null;
@@ -16,11 +16,9 @@ function defineTask<TPayload>(
   taskIdentifier: string,
   taskHandler: (payload: TPayload, helpers: Helpers) => Promise<void>,
 ) {
-  const enqueue = async (payload: TPayload, spec?: TaskSpec) => {
+  const enqueue = async (payload: TPayload, runAt?: Date) => {
     console.log("Enqueuing task", taskIdentifier, payload);
-    const utils = await workerUtils();
-    return await utils.addJob(taskIdentifier, payload, spec);
+    await (await workerUtils()).addJob(taskIdentifier, payload, { runAt });
   };
 
   const handler = (payload: TPayload, helpers: Helpers) => {
@@ -153,7 +153,7 @@ export const queryModel = defineTask<QueryModelJob>("queryModel", async (task) =
         stream,
         numPreviousTries: numPreviousTries + 1,
       },
-      { runAt: retryTime, jobKey: cellId },
+      retryTime,
     );
     await prisma.scenarioVariantCell.update({
       where: { id: cellId },
@@ -184,6 +184,6 @@ export const queueQueryModel = async (cellId: string, stream: boolean) => {
         jobQueuedAt: new Date(),
       },
     }),
-    queryModel.enqueue({ cellId, stream, numPreviousTries: 0 }, { jobKey: cellId }),
+    queryModel.enqueue({ cellId, stream, numPreviousTries: 0 }),
   ]);
 };
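The removed `{ jobKey: cellId }` argument is what made graphile-worker deduplicate work per cell: re-enqueueing the same key replaces the still-pending job instead of adding a second one, while the new code only forwards an optional `runAt`. A minimal sketch of the jobKey behaviour, assuming a reachable `DATABASE_URL`:

```ts
import { makeWorkerUtils } from "graphile-worker";

const utils = await makeWorkerUtils({ connectionString: process.env.DATABASE_URL });

// Same jobKey twice: the second call updates the pending job
// rather than queueing a duplicate.
await utils.addJob("queryModel", { cellId: "cell-1", numPreviousTries: 0 }, { jobKey: "cell-1" });
await utils.addJob("queryModel", { cellId: "cell-1", numPreviousTries: 1 }, { jobKey: "cell-1" });

await utils.release();
```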
@@ -17,7 +17,7 @@ const taskList = registeredTasks.reduce((acc, task) => {
 // Run a worker to execute jobs:
 const runner = await run({
   connectionString: env.DATABASE_URL,
-  concurrency: env.WORKER_CONCURRENCY,
+  concurrency: 10,
   // Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
   noHandleSignals: false,
   pollInterval: 1000,
@@ -141,20 +141,10 @@
         "type": "object",
         "properties": {
           "status": {
-            "anyOf": [
-              {
             "type": "string",
             "enum": [
               "ok"
             ]
-              },
-              {
-                "type": "string",
-                "enum": [
-                  "error"
-                ]
-              }
-            ]
           }
         },
         "required": [
@@ -13,8 +13,7 @@ from .local_testing_only_get_latest_logged_call_response_200_tags import (
 from .report_json_body import ReportJsonBody
 from .report_json_body_tags import ReportJsonBodyTags
 from .report_response_200 import ReportResponse200
-from .report_response_200_status_type_0 import ReportResponse200StatusType0
-from .report_response_200_status_type_1 import ReportResponse200StatusType1
+from .report_response_200_status import ReportResponse200Status
 
 __all__ = (
     "CheckCacheJsonBody",
@@ -26,6 +25,5 @@ __all__ = (
     "ReportJsonBody",
     "ReportJsonBodyTags",
     "ReportResponse200",
-    "ReportResponse200StatusType0",
-    "ReportResponse200StatusType1",
+    "ReportResponse200Status",
 )
@@ -1,9 +1,8 @@
-from typing import Any, Dict, Type, TypeVar, Union
+from typing import Any, Dict, Type, TypeVar
 
 from attrs import define
 
-from ..models.report_response_200_status_type_0 import ReportResponse200StatusType0
-from ..models.report_response_200_status_type_1 import ReportResponse200StatusType1
+from ..models.report_response_200_status import ReportResponse200Status
 
 T = TypeVar("T", bound="ReportResponse200")
 
@@ -12,18 +11,12 @@ T = TypeVar("T", bound="ReportResponse200")
 class ReportResponse200:
     """
     Attributes:
-        status (Union[ReportResponse200StatusType0, ReportResponse200StatusType1]):
+        status (ReportResponse200Status):
     """
 
-    status: Union[ReportResponse200StatusType0, ReportResponse200StatusType1]
+    status: ReportResponse200Status
 
     def to_dict(self) -> Dict[str, Any]:
-        status: str
-
-        if isinstance(self.status, ReportResponse200StatusType0):
-            status = self.status.value
-
-        else:
         status = self.status.value
 
         field_dict: Dict[str, Any] = {}
@@ -38,23 +31,7 @@ class ReportResponse200:
     @classmethod
     def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
         d = src_dict.copy()
-        def _parse_status(data: object) -> Union[ReportResponse200StatusType0, ReportResponse200StatusType1]:
-            try:
-                if not isinstance(data, str):
-                    raise TypeError()
-                status_type_0 = ReportResponse200StatusType0(data)
-
-                return status_type_0
-            except:  # noqa: E722
-                pass
-            if not isinstance(data, str):
-                raise TypeError()
-            status_type_1 = ReportResponse200StatusType1(data)
-
-            return status_type_1
-
-        status = _parse_status(d.pop("status"))
-
+        status = ReportResponse200Status(d.pop("status"))
         report_response_200 = cls(
             status=status,
@@ -1,7 +1,7 @@
 from enum import Enum
 
 
-class ReportResponse200StatusType0(str, Enum):
+class ReportResponse200Status(str, Enum):
     OK = "ok"
 
     def __str__(self) -> str:
@@ -1,8 +0,0 @@
-from enum import Enum
-
-
-class ReportResponse200StatusType1(str, Enum):
-    ERROR = "error"
-
-    def __str__(self) -> str:
-        return str(self.value)
@@ -24,18 +24,10 @@ def _get_tags(openpipe_options):
     return ReportJsonBodyTags.from_dict(tags)
 
 
-def _should_check_cache(openpipe_options, req_payload):
+def _should_check_cache(openpipe_options):
     if configured_client.token == "":
         return False
-    cache_requested = openpipe_options.get("cache", False)
-    streaming = req_payload.get("stream", False)
-    if cache_requested and streaming:
-        print(
-            "Caching is not yet supported for streaming requests. Ignoring cache flag. Vote for this feature at https://github.com/OpenPipe/OpenPipe/issues/159"
-        )
-        return False
-    return cache_requested
+    return openpipe_options.get("cache", False)
 
 
 def _process_cache_payload(
@@ -52,7 +44,7 @@ def maybe_check_cache(
     openpipe_options={},
     req_payload={},
 ):
-    if not _should_check_cache(openpipe_options, req_payload):
+    if not _should_check_cache(openpipe_options):
         return None
     try:
         payload = check_cache.sync(
@@ -76,7 +68,7 @@ async def maybe_check_cache_async(
     openpipe_options={},
     req_payload={},
 ):
-    if not _should_check_cache(openpipe_options, req_payload):
+    if not _should_check_cache(openpipe_options):
         return None
 
     try:
@@ -13,17 +13,15 @@
   "author": "",
   "license": "Apache-2.0",
   "dependencies": {
-    "encoding": "^0.1.13",
     "form-data": "^4.0.0",
     "lodash-es": "^4.17.21",
-    "node-fetch": "^2.6.12",
+    "node-fetch": "^3.3.2",
     "openai-beta": "npm:openai@4.0.0-beta.7",
     "openai-legacy": "npm:openai@3.3.0"
   },
   "devDependencies": {
     "@types/lodash-es": "^4.17.8",
     "@types/node": "^20.4.8",
-    "@types/node-fetch": "^2.6.4",
    "dotenv": "^16.3.1",
    "tsx": "^3.12.7",
    "typescript": "^5.0.4",
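Note on the dependency bump above: node-fetch moves from 2.x (paired with the separate `@types/node-fetch` package, also removed) to 3.x, which bundles its own typings. That is likely why the regenerated `request.ts` below has to add a `@ts-expect-error` above its `import type { AbortSignal } from "node-fetch/externals";` line.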
@@ -2,38 +2,42 @@
 /* istanbul ignore file */
 /* tslint:disable */
 /* eslint-disable */
-import FormData from 'form-data';
-import fetch, { Headers } from 'node-fetch';
-import type { RequestInit, Response } from 'node-fetch';
-import type { AbortSignal } from 'node-fetch/externals';
+import FormData from "form-data";
+import fetch, { Headers } from "node-fetch";
+import type { RequestInit, Response } from "node-fetch";
 
-import { ApiError } from './ApiError';
-import type { ApiRequestOptions } from './ApiRequestOptions';
-import type { ApiResult } from './ApiResult';
-import { CancelablePromise } from './CancelablePromise';
-import type { OnCancel } from './CancelablePromise';
-import type { OpenAPIConfig } from './OpenAPI';
+// @ts-expect-error TODO maybe I need an older node-fetch or something?
+import type { AbortSignal } from "node-fetch/externals";
+
+import { ApiError } from "./ApiError";
+import type { ApiRequestOptions } from "./ApiRequestOptions";
+import type { ApiResult } from "./ApiResult";
+import { CancelablePromise } from "./CancelablePromise";
+import type { OnCancel } from "./CancelablePromise";
+import type { OpenAPIConfig } from "./OpenAPI";
 
-export const isDefined = <T>(value: T | null | undefined): value is Exclude<T, null | undefined> => {
+export const isDefined = <T>(
+  value: T | null | undefined
+): value is Exclude<T, null | undefined> => {
   return value !== undefined && value !== null;
 };
 
 export const isString = (value: any): value is string => {
-  return typeof value === 'string';
+  return typeof value === "string";
 };
 
 export const isStringWithValue = (value: any): value is string => {
-  return isString(value) && value !== '';
+  return isString(value) && value !== "";
 };
 
 export const isBlob = (value: any): value is Blob => {
   return (
-    typeof value === 'object' &&
-    typeof value.type === 'string' &&
-    typeof value.stream === 'function' &&
-    typeof value.arrayBuffer === 'function' &&
-    typeof value.constructor === 'function' &&
-    typeof value.constructor.name === 'string' &&
+    typeof value === "object" &&
+    typeof value.type === "string" &&
+    typeof value.stream === "function" &&
+    typeof value.arrayBuffer === "function" &&
+    typeof value.constructor === "function" &&
+    typeof value.constructor.name === "string" &&
     /^(Blob|File)$/.test(value.constructor.name) &&
     /^(Blob|File)$/.test(value[Symbol.toStringTag])
   );
@@ -48,7 +52,7 @@ export const base64 = (str: string): string => {
     return btoa(str);
   } catch (err) {
     // @ts-ignore
-    return Buffer.from(str).toString('base64');
+    return Buffer.from(str).toString("base64");
   }
 };
 
@@ -62,10 +66,10 @@ export const getQueryString = (params: Record<string, any>): string => {
   const process = (key: string, value: any) => {
     if (isDefined(value)) {
       if (Array.isArray(value)) {
-        value.forEach(v => {
+        value.forEach((v) => {
          process(key, v);
        });
-      } else if (typeof value === 'object') {
+      } else if (typeof value === "object") {
        Object.entries(value).forEach(([k, v]) => {
          process(`${key}[${k}]`, v);
        });
@@ -80,17 +84,17 @@ export const getQueryString = (params: Record<string, any>): string => {
   });
 
   if (qs.length > 0) {
-    return `?${qs.join('&')}`;
+    return `?${qs.join("&")}`;
   }
 
-  return '';
+  return "";
 };
 
 const getUrl = (config: OpenAPIConfig, options: ApiRequestOptions): string => {
   const encoder = config.ENCODE_PATH || encodeURI;
 
   const path = options.url
-    .replace('{api-version}', config.VERSION)
+    .replace("{api-version}", config.VERSION)
     .replace(/{(.*?)}/g, (substring: string, group: string) => {
       if (options.path?.hasOwnProperty(group)) {
         return encoder(String(options.path[group]));
@@ -121,7 +125,7 @@ export const getFormData = (options: ApiRequestOptions): FormData | undefined =>
     .filter(([_, value]) => isDefined(value))
     .forEach(([key, value]) => {
       if (Array.isArray(value)) {
-        value.forEach(v => process(key, v));
+        value.forEach((v) => process(key, v));
       } else {
        process(key, value);
      }
@@ -134,48 +138,57 @@ export const getFormData = (options: ApiRequestOptions): FormData | undefined =>
 
 type Resolver<T> = (options: ApiRequestOptions) => Promise<T>;
 
-export const resolve = async <T>(options: ApiRequestOptions, resolver?: T | Resolver<T>): Promise<T | undefined> => {
-  if (typeof resolver === 'function') {
+export const resolve = async <T>(
+  options: ApiRequestOptions,
+  resolver?: T | Resolver<T>
+): Promise<T | undefined> => {
+  if (typeof resolver === "function") {
     return (resolver as Resolver<T>)(options);
   }
   return resolver;
 };
 
-export const getHeaders = async (config: OpenAPIConfig, options: ApiRequestOptions): Promise<Headers> => {
+export const getHeaders = async (
+  config: OpenAPIConfig,
+  options: ApiRequestOptions
+): Promise<Headers> => {
   const token = await resolve(options, config.TOKEN);
   const username = await resolve(options, config.USERNAME);
   const password = await resolve(options, config.PASSWORD);
   const additionalHeaders = await resolve(options, config.HEADERS);
 
   const headers = Object.entries({
-    Accept: 'application/json',
+    Accept: "application/json",
     ...additionalHeaders,
     ...options.headers,
   })
     .filter(([_, value]) => isDefined(value))
-    .reduce((headers, [key, value]) => ({
+    .reduce(
+      (headers, [key, value]) => ({
       ...headers,
       [key]: String(value),
-    }), {} as Record<string, string>);
+      }),
+      {} as Record<string, string>
+    );
 
   if (isStringWithValue(token)) {
-    headers['Authorization'] = `Bearer ${token}`;
+    headers["Authorization"] = `Bearer ${token}`;
   }
 
   if (isStringWithValue(username) && isStringWithValue(password)) {
     const credentials = base64(`${username}:${password}`);
-    headers['Authorization'] = `Basic ${credentials}`;
+    headers["Authorization"] = `Basic ${credentials}`;
   }
 
   if (options.body) {
     if (options.mediaType) {
-      headers['Content-Type'] = options.mediaType;
+      headers["Content-Type"] = options.mediaType;
     } else if (isBlob(options.body)) {
-      headers['Content-Type'] = 'application/octet-stream';
+      headers["Content-Type"] = "application/octet-stream";
     } else if (isString(options.body)) {
-      headers['Content-Type'] = 'text/plain';
+      headers["Content-Type"] = "text/plain";
     } else if (!isFormData(options.body)) {
-      headers['Content-Type'] = 'application/json';
+      headers["Content-Type"] = "application/json";
     }
   }
 
@@ -184,8 +197,8 @@ export const getHeaders = async (config: OpenAPIConfig, options: ApiRequestOptio
 
 export const getRequestBody = (options: ApiRequestOptions): any => {
   if (options.body !== undefined) {
-    if (options.mediaType?.includes('/json')) {
-      return JSON.stringify(options.body)
+    if (options.mediaType?.includes("/json")) {
+      return JSON.stringify(options.body);
     } else if (isString(options.body) || isBlob(options.body) || isFormData(options.body)) {
       return options.body as any;
     } else {
@@ -217,7 +230,10 @@ export const sendRequest = async (
   return await fetch(url, request);
 };
 
-export const getResponseHeader = (response: Response, responseHeader?: string): string | undefined => {
+export const getResponseHeader = (
+  response: Response,
+  responseHeader?: string
+): string | undefined => {
   if (responseHeader) {
     const content = response.headers.get(responseHeader);
     if (isString(content)) {
@@ -230,10 +246,10 @@ export const getResponseHeader = (response: Response, responseHeader?: string):
 export const getResponseBody = async (response: Response): Promise<any> => {
   if (response.status !== 204) {
     try {
-      const contentType = response.headers.get('Content-Type');
+      const contentType = response.headers.get("Content-Type");
       if (contentType) {
-        const jsonTypes = ['application/json', 'application/problem+json']
-        const isJSON = jsonTypes.some(type => contentType.toLowerCase().startsWith(type));
+        const jsonTypes = ["application/json", "application/problem+json"];
+        const isJSON = jsonTypes.some((type) => contentType.toLowerCase().startsWith(type));
         if (isJSON) {
           return await response.json();
         } else {
@@ -249,15 +265,15 @@ export const getResponseBody = async (response: Response): Promise<any> => {
 
 export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult): void => {
   const errors: Record<number, string> = {
-    400: 'Bad Request',
-    401: 'Unauthorized',
-    403: 'Forbidden',
-    404: 'Not Found',
-    500: 'Internal Server Error',
-    502: 'Bad Gateway',
-    503: 'Service Unavailable',
+    400: "Bad Request",
+    401: "Unauthorized",
+    403: "Forbidden",
+    404: "Not Found",
+    500: "Internal Server Error",
+    502: "Bad Gateway",
+    503: "Service Unavailable",
     ...options.errors,
-  }
+  };
 
   const error = errors[result.status];
   if (error) {
@@ -265,8 +281,8 @@ export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult):
   }
 
   if (!result.ok) {
-    const errorStatus = result.status ?? 'unknown';
-    const errorStatusText = result.statusText ?? 'unknown';
+    const errorStatus = result.status ?? "unknown";
+    const errorStatusText = result.statusText ?? "unknown";
     const errorBody = (() => {
       try {
         return JSON.stringify(result.body, null, 2);
@@ -275,7 +291,9 @@ export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult):
       }
     })();
 
-    throw new ApiError(options, result,
+    throw new ApiError(
+      options,
+      result,
       `Generic Error: status: ${errorStatus}; status text: ${errorStatusText}; body: ${errorBody}`
     );
   }
@@ -288,7 +306,10 @@ export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult):
  * @returns CancelablePromise<T>
  * @throws ApiError
  */
-export const request = <T>(config: OpenAPIConfig, options: ApiRequestOptions): CancelablePromise<T> => {
+export const request = <T>(
+  config: OpenAPIConfig,
+  options: ApiRequestOptions
+): CancelablePromise<T> => {
   return new CancelablePromise(async (resolve, reject, onCancel) => {
     try {
       const url = getUrl(config, options);
@@ -82,7 +82,7 @@ export class DefaultService {
       tags?: Record<string, string>;
     },
   ): CancelablePromise<{
-    status: ('ok' | 'error');
+    status: 'ok';
   }> {
     return this.httpRequest.request({
       method: 'POST',
@@ -2,13 +2,10 @@ import dotenv from "dotenv";
 import { expect, test } from "vitest";
 import OpenAI from ".";
 import {
-  ChatCompletion,
   CompletionCreateParams,
   CreateChatCompletionRequestMessage,
 } from "openai-beta/resources/chat/completions";
 import { OPClient } from "../codegen";
-import mergeChunks from "./mergeChunks";
-import assert from "assert";
 
 dotenv.config({ path: "../.env" });
 
@@ -34,7 +31,9 @@ test("basic call", async () => {
   };
   const completion = await oaiClient.chat.completions.create({
     ...payload,
-    openpipe: { tags: { promptId: "test" } },
+    openpipe: {
+      tags: { promptId: "test" },
+    },
   });
   await completion.openpipe.reportingFinished;
   const lastLogged = await lastLoggedCall();
@@ -47,32 +46,29 @@ const randomString = (length: number) => {
   const characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
   return Array.from(
     { length },
-    () => characters[Math.floor(Math.random() * characters.length)],
+    () => characters[Math.floor(Math.random() * characters.length)]
   ).join("");
 };
 
-test("streaming", async () => {
+test.skip("streaming", async () => {
   const completion = await oaiClient.chat.completions.create({
     model: "gpt-3.5-turbo",
-    messages: [{ role: "system", content: "count to 3" }],
+    messages: [{ role: "system", content: "count to 4" }],
     stream: true,
   });
 
-  let merged: ChatCompletion | null = null;
+  let merged = null;
   for await (const chunk of completion) {
-    merged = mergeChunks(merged, chunk);
+    merged = merge_openai_chunks(merged, chunk);
   }
 
   const lastLogged = await lastLoggedCall();
-  await completion.openpipe.reportingFinished;
-  expect(merged).toMatchObject(lastLogged?.modelResponse?.respPayload);
-  expect(lastLogged?.modelResponse?.reqPayload.messages).toMatchObject([
-    { role: "system", content: "count to 3" },
-  ]);
+  expect(lastLogged?.modelResponse?.respPayload.choices[0].message.content).toBe(
+    merged.choices[0].message.content
+  );
 });
 
-test("bad call streaming", async () => {
+test.skip("bad call streaming", async () => {
   try {
     await oaiClient.chat.completions.create({
       model: "gpt-3.5-turbo-blaster",
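For context, the `mergeChunks` helper imported on the removed lines folds a stream of chat-completion chunks into a single completion object. A rough, illustrative-only sketch of that idea (not the repository's implementation, and with deliberately simplified types):

```ts
// Simplified chunk shape: each streamed chunk carries a content delta.
type SimpleChunk = { choices: { delta: { role?: string; content?: string } }[] };
type SimpleMessage = { role: string; content: string };

function mergeChunksSketch(merged: SimpleMessage | null, chunk: SimpleChunk): SimpleMessage {
  const delta = chunk.choices[0]?.delta ?? {};
  return {
    role: merged?.role ?? delta.role ?? "assistant",
    // Append each delta's content to the accumulated message.
    content: (merged?.content ?? "") + (delta.content ?? ""),
  };
}
```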
@@ -80,29 +76,26 @@ test("bad call streaming", async () => {
       stream: true,
     });
   } catch (e) {
-    await e.openpipe.reportingFinished;
     const lastLogged = await lastLoggedCall();
-    expect(lastLogged?.modelResponse?.errorMessage).toEqual(
-      "The model `gpt-3.5-turbo-blaster` does not exist",
+    expect(lastLogged?.modelResponse?.errorMessage).toBe(
+      "The model `gpt-3.5-turbo-blaster` does not exist"
     );
-    expect(lastLogged?.modelResponse?.statusCode).toEqual(404);
+    expect(lastLogged?.modelResponse?.statusCode).toBe(404);
   }
 });
 
 test("bad call", async () => {
   try {
     await oaiClient.chat.completions.create({
-      model: "gpt-3.5-turbo-buster",
+      model: "gpt-3.5-turbo-booster",
       messages: [{ role: "system", content: "count to 10" }],
     });
   } catch (e) {
-    assert("openpipe" in e);
-    await e.openpipe.reportingFinished;
     const lastLogged = await lastLoggedCall();
-    expect(lastLogged?.modelResponse?.errorMessage).toEqual(
-      "The model `gpt-3.5-turbo-buster` does not exist",
+    expect(lastLogged?.modelResponse?.errorMessage).toBe(
+      "The model `gpt-3.5-turbo-booster` does not exist"
     );
-    expect(lastLogged?.modelResponse?.statusCode).toEqual(404);
+    expect(lastLogged?.modelResponse?.statusCode).toBe(404);
   }
 });
 
@@ -116,12 +109,12 @@ test("caching", async () => {
     messages: [message],
     openpipe: { cache: true },
   });
-  expect(completion.openpipe.cacheStatus).toEqual("MISS");
+  expect(completion.openpipe.cacheStatus).toBe("MISS");
 
   await completion.openpipe.reportingFinished;
   const firstLogged = await lastLoggedCall();
-  expect(completion.choices[0].message.content).toEqual(
-    firstLogged?.modelResponse?.respPayload.choices[0].message.content,
+  expect(completion.choices[0].message.content).toBe(
+    firstLogged?.modelResponse?.respPayload.choices[0].message.content
   );
 
   const completion2 = await oaiClient.chat.completions.create({
@@ -129,5 +122,5 @@ test("caching", async () => {
     messages: [message],
     openpipe: { cache: true },
   });
-  expect(completion2.openpipe.cacheStatus).toEqual("HIT");
+  expect(completion2.openpipe.cacheStatus).toBe("HIT");
 });
@@ -5,9 +5,9 @@ import {
   ChatCompletion,
   ChatCompletionChunk,
   CompletionCreateParams,
+  Completions,
 } from "openai-beta/resources/chat/completions";
 
-import { WrappedStream } from "./streaming";
 import { DefaultService, OPClient } from "../codegen";
 import { Stream } from "openai-beta/streaming";
 import { OpenPipeArgs, OpenPipeMeta, type OpenPipeConfig, getTags } from "../shared";
@@ -27,11 +27,11 @@ export default class OpenAI extends openai.OpenAI {
         BASE:
           openpipe?.baseUrl ?? readEnv("OPENPIPE_BASE_URL") ?? "https://app.openpipe.ai/api/v1",
         TOKEN: openPipeApiKey,
-      }),
+      })
     );
   } else {
     console.warn(
-      "You're using the OpenPipe client without an API key. No completion requests will be logged.",
+      "You're using the OpenPipe client without an API key. No completion requests will be logged."
     );
   }
 }
@@ -43,10 +43,10 @@ class WrappedChat extends openai.OpenAI.Chat {
     this.completions.opClient = client;
   }
 
-  completions: WrappedCompletions = new WrappedCompletions(this.client);
+  completions: InstrumentedCompletions = new InstrumentedCompletions(this.client);
 }
 
-class WrappedCompletions extends openai.OpenAI.Chat.Completions {
+class InstrumentedCompletions extends openai.OpenAI.Chat.Completions {
   opClient?: OPClient;
 
   constructor(client: openai.OpenAI, opClient?: OPClient) {
@@ -54,35 +54,32 @@ class WrappedCompletions extends openai.OpenAI.Chat.Completions {
|
|||||||
this.opClient = opClient;
|
this.opClient = opClient;
|
||||||
}
|
}
|
||||||
|
|
||||||
async _report(args: Parameters<DefaultService["report"]>[0]) {
|
_report(args: Parameters<DefaultService["report"]>[0]) {
|
||||||
try {
|
try {
|
||||||
this.opClient ? await this.opClient.default.report(args) : Promise.resolve();
|
return this.opClient ? this.opClient.default.report(args) : Promise.resolve();
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.error(e);
|
console.error(e);
|
||||||
|
return Promise.resolve();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
create(
|
create(
|
||||||
body: CompletionCreateParams.CreateChatCompletionRequestNonStreaming & OpenPipeArgs,
|
body: CompletionCreateParams.CreateChatCompletionRequestNonStreaming & OpenPipeArgs,
|
||||||
options?: Core.RequestOptions,
|
options?: Core.RequestOptions
|
||||||
): Promise<Core.APIResponse<ChatCompletion & { openpipe: OpenPipeMeta }>>;
|
): Promise<Core.APIResponse<ChatCompletion & { openpipe: OpenPipeMeta }>>;
|
||||||
create(
|
create(
|
||||||
body: CompletionCreateParams.CreateChatCompletionRequestStreaming & OpenPipeArgs,
|
body: CompletionCreateParams.CreateChatCompletionRequestStreaming & OpenPipeArgs,
|
||||||
options?: Core.RequestOptions,
|
options?: Core.RequestOptions
|
||||||
): Promise<Core.APIResponse<WrappedStream>>;
|
): Promise<Core.APIResponse<Stream<ChatCompletionChunk>>>;
|
||||||
async create(
|
async create(
|
||||||
{ openpipe, ...body }: CompletionCreateParams & OpenPipeArgs,
|
{ openpipe, ...body }: CompletionCreateParams & OpenPipeArgs,
|
||||||
options?: Core.RequestOptions,
|
options?: Core.RequestOptions
|
||||||
): Promise<Core.APIResponse<(ChatCompletion & { openpipe: OpenPipeMeta }) | WrappedStream>> {
|
): Promise<
|
||||||
|
Core.APIResponse<(ChatCompletion & { openpipe: OpenPipeMeta }) | Stream<ChatCompletionChunk>>
|
||||||
|
> {
|
||||||
|
console.log("LALALA REPORT", this.opClient);
|
||||||
const requestedAt = Date.now();
|
const requestedAt = Date.now();
|
||||||
let reportingFinished: OpenPipeMeta["reportingFinished"] = Promise.resolve();
|
const cacheRequested = openpipe?.cache ?? false;
|
||||||
let cacheRequested = openpipe?.cache ?? false;
|
|
||||||
if (cacheRequested && body.stream) {
|
|
||||||
console.warn(
|
|
||||||
`Caching is not yet supported for streaming requests. Ignoring cache flag. Vote for this feature at https://github.com/OpenPipe/OpenPipe/issues/159`,
|
|
||||||
);
|
|
||||||
cacheRequested = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (cacheRequested) {
|
if (cacheRequested) {
|
||||||
try {
|
try {
|
||||||
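Editor's note: the _report change above is what makes the metadata awaitable. The promise returned by the report call is kept instead of being awaited inside the wrapper, so it can be exposed to the caller. A minimal sketch of that pattern (illustrative only, not the wrapper's exact code; withReporting is a hypothetical helper name):

  // Illustrative only: start reporting without blocking the response path, but
  // keep the promise so a caller can still await it via openpipe.reportingFinished.
  function withReporting<T extends object>(response: T, report: () => Promise<unknown>) {
    const reportingFinished = report().catch((e) => console.error(e));
    return { ...response, openpipe: { cacheStatus: "MISS" as const, reportingFinished } };
  }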
@@ -95,13 +92,12 @@ class WrappedCompletions extends openai.OpenAI.Chat.Completions {
        .then((res) => res.respPayload);

      if (cached) {
-        const meta = {
-          cacheStatus: "HIT",
-          reportingFinished,
-        };
        return {
          ...cached,
-          openpipe: meta,
+          openpipe: {
+            cacheStatus: "HIT",
+            reportingFinished: Promise.resolve(),
+          },
        };
      }
    } catch (e) {
@@ -109,23 +105,15 @@ class WrappedCompletions extends openai.OpenAI.Chat.Completions {
      }
    }

+    let reportingFinished: OpenPipeMeta["reportingFinished"] = Promise.resolve();

    try {
      if (body.stream) {
        const stream = await super.create(body, options);
-        const wrappedStream = new WrappedStream(stream, (response) =>
-          this._report({
-            requestedAt,
-            receivedAt: Date.now(),
-            reqPayload: body,
-            respPayload: response,
-            statusCode: 200,
-            tags: getTags(openpipe),
-          }),
-        );

        // Do some logging of each chunk here

-        return wrappedStream;
+        return stream;
      } else {
        const response = await super.create(body, options);

@@ -159,16 +147,6 @@ class WrappedCompletions extends openai.OpenAI.Chat.Completions {
          tags: getTags(openpipe),
        });
      }
-      // make sure error is an object we can add properties to
-      if (typeof error === "object" && error !== null) {
-        error = {
-          ...error,
-          openpipe: {
-            cacheStatus: cacheRequested ? "MISS" : "SKIP",
-            reportingFinished,
-          },
-        };
-      }

      throw error;
    }
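Editor's note: the removed error block above is visible to callers on the branch that keeps it, because a failed request rethrows an error object augmented with OpenPipe metadata. A caller-side sketch under that assumption (oaiClient, model, and messages are placeholders):

  try {
    await oaiClient.chat.completions.create({
      model: "gpt-3.5-turbo",
      messages: [{ role: "user", content: "Count to three" }],
      openpipe: { cache: true },
    });
  } catch (err) {
    // Per the removed lines, the rethrown error carries cacheStatus ("MISS" or
    // "SKIP") plus the reportingFinished promise.
    const meta = (err as { openpipe?: { cacheStatus: string; reportingFinished: Promise<unknown> } }).openpipe;
    if (meta) await meta.reportingFinished;
  }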
@@ -1,43 +0,0 @@
- import { ChatCompletion, ChatCompletionChunk } from "openai-beta/resources/chat";
- import { Stream } from "openai-beta/streaming";
- import { OpenPipeMeta } from "../shared";
- import mergeChunks from "./mergeChunks";
-
- export class WrappedStream extends Stream<ChatCompletionChunk> {
-   openpipe: OpenPipeMeta;
-
-   private resolveReportingFinished: () => void = () => {};
-   private report: (response: unknown) => Promise<void>;
-
-   constructor(stream: Stream<ChatCompletionChunk>, report: (response: unknown) => Promise<void>) {
-     super(stream.response, stream.controller);
-     this.report = report;
-
-     const reportingFinished = new Promise<void>((resolve) => {
-       this.resolveReportingFinished = resolve;
-     });
-
-     this.openpipe = {
-       cacheStatus: "MISS",
-       reportingFinished,
-     };
-   }
-
-   async *[Symbol.asyncIterator](): AsyncIterator<ChatCompletionChunk, any, undefined> {
-     const iterator = super[Symbol.asyncIterator]();
-
-     let combinedResponse: ChatCompletion | null = null;
-     while (true) {
-       const result = await iterator.next();
-       if (result.done) break;
-       combinedResponse = mergeChunks(combinedResponse, result.value);
-
-       yield result.value;
-     }
-
-     await this.report(combinedResponse);
-
-     // Resolve the promise here
-     this.resolveReportingFinished();
-   }
- }
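Editor's note: for orientation, the deleted class was consumed like a normal chunk stream; iterating it drives mergeChunks and, once the iterator completes, the merged response is reported and reportingFinished resolves. A rough usage sketch on the branch that still exports WrappedStream (oaiClient and the request are placeholders):

  const stream = await oaiClient.chat.completions.create({
    model: "gpt-3.5-turbo",
    messages: [{ role: "user", content: "Count to three" }],
    stream: true,
  });

  // Consume the chunks as usual; the wrapper accumulates them under the hood.
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content ?? "");
  }

  // After the loop the combined response has been reported in the background.
  await stream.openpipe.reportingFinished;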
@@ -1,5 +1,4 @@
import pkg from "../package.json";
- import { DefaultService } from "./codegen";

export type OpenPipeConfig = {
  apiKey?: string;
@@ -16,11 +15,9 @@ export type OpenPipeMeta = {
  // We report your call to OpenPipe asynchronously in the background. If you
  // need to wait until the report is sent to take further action, you can await
  // this promise.
-  reportingFinished: Promise<void>;
+  reportingFinished: Promise<void | { status: "ok" }>;
};

- export type ReportFn = (...args: Parameters<DefaultService["report"]>) => Promise<void>;
-
export const getTags = (args: OpenPipeArgs["openpipe"]): Record<string, string> => ({
  ...args?.tags,
  ...(args?.cache ? { $cache: args.cache?.toString() } : {}),
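Editor's note: as a quick illustration of getTags as shown above (the visible fragment only spreads the caller's tags and stringifies the cache flag; anything else it might add is outside this hunk, and promptId is a hypothetical tag):

  getTags({ cache: true, tags: { promptId: "classify-email" } });
  // => { promptId: "classify-email", $cache: "true" }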
pnpm-lock.yaml (generated, 79 changed lines)
@@ -166,7 +166,7 @@ importers:
        version: 6.9.4
      openai:
        specifier: 4.0.0-beta.7
-        version: 4.0.0-beta.7(encoding@0.1.13)
+        version: 4.0.0-beta.7
      openpipe:
        specifier: workspace:*
        version: link:../client-libs/typescript
@@ -357,9 +357,6 @@ importers:

  client-libs/typescript:
    dependencies:
-      encoding:
-        specifier: ^0.1.13
-        version: 0.1.13
      form-data:
        specifier: ^4.0.0
        version: 4.0.0
@@ -367,11 +364,11 @@ importers:
        specifier: ^4.17.21
        version: 4.17.21
      node-fetch:
-        specifier: ^2.6.12
-        version: 2.6.12(encoding@0.1.13)
+        specifier: ^3.3.2
+        version: 3.3.2
      openai-beta:
        specifier: npm:openai@4.0.0-beta.7
-        version: /openai@4.0.0-beta.7(encoding@0.1.13)
+        version: /openai@4.0.0-beta.7
      openai-legacy:
        specifier: npm:openai@3.3.0
        version: /openai@3.3.0
@@ -382,9 +379,6 @@ importers:
      '@types/node':
        specifier: ^20.4.8
        version: 20.4.8
-      '@types/node-fetch':
-        specifier: ^2.6.4
-        version: 2.6.4
      dotenv:
        specifier: ^16.3.1
        version: 16.3.1
@@ -422,7 +416,7 @@ packages:
      digest-fetch: 1.3.0
      form-data-encoder: 1.7.2
      formdata-node: 4.4.1
-      node-fetch: 2.6.12(encoding@0.1.13)
+      node-fetch: 2.6.12
    transitivePeerDependencies:
      - encoding
    dev: false
@@ -2696,7 +2690,7 @@ packages:
    dependencies:
      https-proxy-agent: 5.0.1
      mkdirp: 0.5.6
-      node-fetch: 2.6.12(encoding@0.1.13)
+      node-fetch: 2.6.12
      progress: 2.0.3
      proxy-from-env: 1.1.0
      which: 2.0.2
@@ -3186,6 +3180,7 @@ packages:
    dependencies:
      '@types/node': 20.4.10
      form-data: 3.0.1
+    dev: false

  /@types/node@18.16.0:
    resolution: {integrity: sha512-BsAaKhB+7X+H4GnSjGhJG9Qi8Tw+inU9nJDwmD5CgOmBLEI6ArdhikpLX7DjbjDRDTbqZzU2LSQNZg8WGPiSZQ==}
@@ -3836,6 +3831,7 @@ packages:

  /asynckit@0.4.0:
    resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
+    dev: false

  /available-typed-arrays@1.0.5:
    resolution: {integrity: sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==}
@@ -4226,6 +4222,7 @@ packages:
    engines: {node: '>= 0.8'}
    dependencies:
      delayed-stream: 1.0.0
+    dev: false

  /comma-separated-tokens@1.0.8:
    resolution: {integrity: sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==}
@@ -4510,6 +4507,11 @@ packages:
      assert-plus: 1.0.0
    dev: false

+  /data-uri-to-buffer@4.0.1:
+    resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==}
+    engines: {node: '>= 12'}
+    dev: false
+
  /date-fns@2.30.0:
    resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==}
    engines: {node: '>=0.11'}
@@ -4593,6 +4595,7 @@ packages:
  /delayed-stream@1.0.0:
    resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
    engines: {node: '>=0.4.0'}
+    dev: false

  /depd@1.1.2:
    resolution: {integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==}
@@ -4726,12 +4729,6 @@ packages:
    engines: {node: '>= 0.8'}
    dev: false

-  /encoding@0.1.13:
-    resolution: {integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==}
-    dependencies:
-      iconv-lite: 0.6.3
-    dev: false
-
  /engine.io-client@6.5.2:
    resolution: {integrity: sha512-CQZqbrpEYnrpGqC07a9dJDz4gePZUgTPMU3NKJPSeQOyw27Tst4Pl3FemKoFGAlHzgZmKjoRmiJvbWfhCXUlIg==}
    dependencies:
@@ -5402,6 +5399,14 @@ packages:
      format: 0.2.2
    dev: false

+  /fetch-blob@3.2.0:
+    resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==}
+    engines: {node: ^12.20 || >= 14.13}
+    dependencies:
+      node-domexception: 1.0.0
+      web-streams-polyfill: 3.2.1
+    dev: false
+
  /fflate@0.4.8:
    resolution: {integrity: sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==}
    dev: false
@@ -5517,6 +5522,7 @@ packages:
      asynckit: 0.4.0
      combined-stream: 1.0.8
      mime-types: 2.1.35
+    dev: false

  /form-data@4.0.0:
    resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==}
@@ -5540,6 +5546,13 @@ packages:
      web-streams-polyfill: 4.0.0-beta.3
    dev: false

+  /formdata-polyfill@4.0.10:
+    resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==}
+    engines: {node: '>=12.20.0'}
+    dependencies:
+      fetch-blob: 3.2.0
+    dev: false
+
  /forwarded@0.2.0:
    resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==}
    engines: {node: '>= 0.6'}
@@ -5955,13 +5968,6 @@ packages:
      safer-buffer: 2.1.2
    dev: false

-  /iconv-lite@0.6.3:
-    resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
-    engines: {node: '>=0.10.0'}
-    dependencies:
-      safer-buffer: 2.1.2
-    dev: false
-
  /ignore@5.2.4:
    resolution: {integrity: sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==}
    engines: {node: '>= 4'}
@@ -6253,7 +6259,7 @@ packages:
    resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==}
    engines: {node: '>= 10.13.0'}
    dependencies:
-      '@types/node': 20.4.10
+      '@types/node': 18.16.0
      merge-stream: 2.0.0
      supports-color: 8.1.1

@@ -6853,7 +6859,7 @@ packages:
    engines: {node: '>=10.5.0'}
    dev: false

-  /node-fetch@2.6.12(encoding@0.1.13):
+  /node-fetch@2.6.12:
    resolution: {integrity: sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==}
    engines: {node: 4.x || >=6.0.0}
    peerDependencies:
@@ -6862,10 +6868,18 @@ packages:
      encoding:
        optional: true
    dependencies:
-      encoding: 0.1.13
      whatwg-url: 5.0.0
    dev: false

+  /node-fetch@3.3.2:
+    resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==}
+    engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+    dependencies:
+      data-uri-to-buffer: 4.0.1
+      fetch-blob: 3.2.0
+      formdata-polyfill: 4.0.10
+    dev: false
+
  /node-mocks-http@1.12.2:
    resolution: {integrity: sha512-xhWwC0dh35R9rf0j3bRZXuISXdHxxtMx0ywZQBwjrg3yl7KpRETzogfeCamUIjltpn0Fxvs/ZhGJul1vPLrdJQ==}
    engines: {node: '>=0.6'}
@@ -7013,7 +7027,7 @@ packages:
      - debug
    dev: false

-  /openai@4.0.0-beta.7(encoding@0.1.13):
+  /openai@4.0.0-beta.7:
    resolution: {integrity: sha512-jHjwvpMuGkNxiQ3erwLZsOvPEhcVrMtwtfNeYmGCjhbdB+oStVw/7pIhIPkualu8rlhLwgMR7awknIaN3IQcOA==}
    dependencies:
      '@types/node': 18.16.0
@@ -7023,7 +7037,7 @@ packages:
      digest-fetch: 1.3.0
      form-data-encoder: 1.7.2
      formdata-node: 4.4.1
-      node-fetch: 2.6.12(encoding@0.1.13)
+      node-fetch: 2.6.12
    transitivePeerDependencies:
      - encoding
    dev: false
@@ -9123,6 +9137,11 @@ packages:
      glob-to-regexp: 0.4.1
      graceful-fs: 4.2.11

+  /web-streams-polyfill@3.2.1:
+    resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==}
+    engines: {node: '>= 8'}
+    dev: false
+
  /web-streams-polyfill@4.0.0-beta.3:
    resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==}
    engines: {node: '>= 14'}
render.yaml (14 changed lines)
@@ -7,7 +7,7 @@ databases:
services:
  - type: web
    name: querykey-prod-web
-    runtime: docker
+    env: docker
    dockerfilePath: ./app/Dockerfile
    dockerContext: .
    plan: standard
@@ -21,6 +21,8 @@ services:
          name: querykey-prod
          property: connectionString
      - fromGroup: querykey-prod
+      - key: NEXT_PUBLIC_SOCKET_URL
+        value: https://querykey-prod-wss.onrender.com
      # Render support says we need to manually set this because otherwise
      # sometimes it checks a different random port that NextJS opens for
      # liveness and the liveness check fails.
@@ -29,16 +31,8 @@ services:

  - type: web
    name: querykey-prod-wss
-    runtime: docker
+    env: docker
    dockerfilePath: ./app/Dockerfile
    dockerContext: .
    plan: free
    dockerCommand: pnpm tsx src/wss-server.ts
-
-  - type: worker
-    name: querykey-prod-worker
-    runtime: docker
-    dockerfilePath: ./app/Dockerfile
-    dockerContext: .
-    plan: starter
-    dockerCommand: pnpm tsx src/server/tasks/worker.ts