Compare commits
19 Commits: hide-model...publish-py
| Author | SHA1 | Date |
|---|---|---|
|  | 83d71c6e9d |  |
|  | 1693ac1c58 |  |
|  | 8de0c0fc5a |  |
|  | 3ed390c941 |  |
|  | fa16dd61dc |  |
|  | cb73598148 |  |
|  | 2f01e53cf3 |  |
|  | 5b8113d8e7 |  |
|  | 96a589e401 |  |
|  | 16354d83df |  |
|  | 6a5afd0c9b |  |
|  | 1684663ddc |  |
|  | 70fae68225 |  |
|  | 518c8620d0 |  |
|  | ab87794192 |  |
|  | 48aa697002 |  |
|  | 55f2be861e |  |
|  | fa87887e91 |  |
|  | 28713fb3ef |  |
14  .github/ISSUE_TEMPLATE/sweep-fast-template.yml  (vendored, Normal file)
@@ -0,0 +1,14 @@
+name: Sweep Fast Issue
+title: 'Sweep (fast): '
+description: For few-line fixes to be handled by Sweep, an AI-powered junior developer. Sweep will use GPT-3.5 to quickly create a PR for very small changes.
+labels: sweep
+body:
+  - type: textarea
+    id: description
+    attributes:
+      label: Details
+      description: Tell Sweep where and what to edit and provide enough context for a new developer to the codebase
+      placeholder: |
+        Bugs: The bug might be in ... file. Here are the logs: ...
+        Features: the new endpoint should use the ... class from ... file because it contains ... logic.
+        Refactors: We are migrating this function to ... version because ...
14  .github/ISSUE_TEMPLATE/sweep-slow-template.yml  (vendored, Normal file)
@@ -0,0 +1,14 @@
+name: Sweep Slow Issue
+title: 'Sweep (slow): '
+description: For larger bugs, features, refactors, and tests to be handled by Sweep, an AI-powered junior developer. Sweep will perform a deeper search and more self-reviews but will take longer.
+labels: sweep
+body:
+  - type: textarea
+    id: description
+    attributes:
+      label: Details
+      description: Tell Sweep where and what to edit and provide enough context for a new developer to the codebase
+      placeholder: |
+        Bugs: The bug might be in ... file. Here are the logs: ...
+        Features: the new endpoint should use the ... class from ... file because it contains ... logic.
+        Refactors: We are migrating this function to ... version because ...
14  .github/ISSUE_TEMPLATE/sweep-template.yml  (vendored, Normal file)
@@ -0,0 +1,14 @@
+name: Sweep Issue
+title: 'Sweep: '
+description: For small bugs, features, refactors, and tests to be handled by Sweep, an AI-powered junior developer.
+labels: sweep
+body:
+  - type: textarea
+    id: description
+    attributes:
+      label: Details
+      description: Tell Sweep where and what to edit and provide enough context for a new developer to the codebase
+      placeholder: |
+        Bugs: The bug might be in ... file. Here are the logs: ...
+        Features: the new endpoint should use the ... class from ... file because it contains ... logic.
+        Refactors: We are migrating this function to ... version because ...
@@ -16,6 +16,7 @@
 <a href='http://makeapullrequest.com'><img alt='PRs Welcome' src='https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square'/></a>
 <a href="https://github.com/openpipe/openpipe/graphs/commit-activity"><img alt="GitHub commit activity" src="https://img.shields.io/github/commit-activity/m/openpipe/openpipe?style=flat-square"/></a>
 <a href="https://github.com/openpipe/openpipe/issues"><img alt="GitHub closed issues" src="https://img.shields.io/github/issues-closed/openpipe/openpipe?style=flat-square"/></a>
+<img src="https://img.shields.io/badge/Y%20Combinator-S23-orange?style=flat-square" alt="Y Combinator S23">
 </p>

 <p align="center">
@@ -27,7 +28,7 @@ Use powerful but expensive LLMs to fine-tune smaller and cheaper models suited t
 <br>

-## 🪛 Features
+## Features

 * <b>Experiment</b>
   * Bulk-test wide-reaching scenarios using code templating.

@@ -79,7 +79,8 @@
     "nextjs-routes": "^2.0.1",
     "nodemailer": "^6.9.4",
     "openai": "4.0.0-beta.7",
-    "openpipe": "workspace:*",
+    "openpipe": "^0.3.0",
+    "openpipe-dev": "workspace:^",
     "pg": "^8.11.2",
     "pluralize": "^8.0.0",
    "posthog-js": "^1.75.3",
12  app/prisma/deleteOneFineTune.ts  (Normal file)
@@ -0,0 +1,12 @@
+import { prisma } from "~/server/db";
+
+// delete most recent fineTune
+const mostRecentFineTune = await prisma.fineTune.findFirst({
+  orderBy: { createdAt: "desc" },
+});
+
+if (mostRecentFineTune) {
+  await prisma.fineTune.delete({
+    where: { id: mostRecentFineTune.id },
+  });
+}
@@ -80,7 +80,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     },
     respStatus: 200,
     respPayload: {
-      id: "chatcmpl-7lNspqePJWVyXwXebupxb1eMozo6Q",
+      id: "chatcmpl-7",
       model: "gpt-3.5-turbo-0613",
       usage: {
         total_tokens: 241,
@@ -108,7 +108,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     inputTokens: 236,
     outputTokens: 5,
     finishReason: "stop",
-    tags: [],
+    tags: [{ name: "prompt_id", value: "define_func" }],
   },
   {
     reqPayload: {
@@ -167,7 +167,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     },
     respStatus: 200,
     respPayload: {
-      id: "chatcmpl-7lNifmc5AncyAvleZRDBhAcLFYBIT",
+      id: "chatcmpl-7",
       model: "gpt-3.5-turbo-0613",
       usage: {
         total_tokens: 227,
@@ -210,7 +210,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     },
     respStatus: 200,
     respPayload: {
-      id: "chatcmpl-7lNh1TtrsJVgz3Nj70bKkZZk7xPi7",
+      id: "chatcmpl-7",
       model: "gpt-3.5-turbo-0613",
       usage: {
         total_tokens: 21,
@@ -234,7 +234,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     inputTokens: 14,
     outputTokens: 7,
     finishReason: "stop",
-    tags: [{ name: "prompt_id", value: "id2" }],
+    tags: [{ name: "prompt_id", value: "translate_text" }],
   },
   {
     reqPayload: {
@@ -281,7 +281,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     },
     respStatus: 200,
     respPayload: {
-      id: "chatcmpl-7lQS3MktOT8BTgNEytl9dkyssCQqL",
+      id: "chatcmpl-7",
       model: "gpt-4-0613",
       usage: {
         total_tokens: 2910,
@@ -311,7 +311,7 @@ const MODEL_RESPONSE_TEMPLATES: {
     outputTokens: 108,
     finishReason: "stop",
     tags: [
-      { name: "prompt_id", value: "chatcmpl-7lQS3MktOT8BTgNEytl9dkyssCQqL" },
+      { name: "prompt_id", value: "chatcmpl-7" },
       { name: "some_other_tag", value: "some_other_value" },
     ],
   },
@@ -339,7 +339,7 @@ const loggedCallsToCreate: Prisma.LoggedCallCreateManyInput[] = [];
 const loggedCallModelResponsesToCreate: Prisma.LoggedCallModelResponseCreateManyInput[] = [];
 const loggedCallsToUpdate: Prisma.LoggedCallUpdateArgs[] = [];
 const loggedCallTagsToCreate: Prisma.LoggedCallTagCreateManyInput[] = [];
-for (let i = 0; i < 1437; i++) {
+for (let i = 0; i < 11437; i++) {
   const loggedCallId = uuidv4();
   const loggedCallModelResponseId = uuidv4();
   const template =
@@ -13,19 +13,17 @@ import {
   Link,
 } from "@chakra-ui/react";
 import { BsStars } from "react-icons/bs";
-import { useRouter } from "next/router";
 import { useSession } from "next-auth/react";

-export const BetaModal = () => {
-  const router = useRouter();
+export const BetaModal = ({ isOpen, onClose }: { isOpen: boolean; onClose: () => void }) => {
   const session = useSession();

   const email = session.data?.user.email ?? "";

   return (
     <Modal
-      isOpen
-      onClose={router.back}
+      isOpen={isOpen}
+      onClose={onClose}
       closeOnOverlayClick={false}
       size={{ base: "xl", md: "2xl" }}
     >
@@ -56,7 +54,7 @@ export const BetaModal = () => {
         >
           Join Waitlist
         </Button>
-        <Button colorScheme="blue" onClick={router.back}>
+        <Button colorScheme="blue" onClick={onClose}>
           Done
         </Button>
       </HStack>
@@ -1,3 +1,4 @@
+import { useState, useMemo, useCallback } from "react";
 import {
   Button,
   HStack,
@@ -14,16 +15,18 @@ import {
   VStack,
 } from "@chakra-ui/react";
 import { type PromptVariant } from "@prisma/client";
-import { isObject, isString } from "lodash-es";
-import { useState } from "react";
+import { isString } from "lodash-es";
 import { RiExchangeFundsFill } from "react-icons/ri";

 import { type ProviderModel } from "~/modelProviders/types";
 import { api } from "~/utils/api";
-import { useExperiment, useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
+import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
 import { lookupModel, modelLabel } from "~/utils/utils";
 import CompareFunctions from "../RefinePromptModal/CompareFunctions";
 import { ModelSearch } from "./ModelSearch";
 import { ModelStatsCard } from "./ModelStatsCard";
+import { maybeReportError } from "~/utils/errorHandling/maybeReportError";
+import { useAppStore } from "~/state/store";

 export const ChangeModelModal = ({
   variant,
@@ -32,48 +35,43 @@ export const ChangeModelModal = ({
   variant: PromptVariant;
   onClose: () => void;
 }) => {
+  const editorOptionsMap = useAppStore((s) => s.sharedVariantEditor.editorOptionsMap);
+  const originalPromptFn = useMemo(
+    () => editorOptionsMap[variant.uiId]?.getContent() || "",
+    [editorOptionsMap, variant.uiId],
+  );
+
   const originalModel = lookupModel(variant.modelProvider, variant.model);
   const [selectedModel, setSelectedModel] = useState({
     provider: variant.modelProvider,
     model: variant.model,
   } as ProviderModel);
   const [convertedModel, setConvertedModel] = useState<ProviderModel | undefined>();
-  const visibleScenarios = useVisibleScenarioIds();
-
-  const utils = api.useContext();
+  const [modifiedPromptFn, setModifiedPromptFn] = useState<string>();

   const experiment = useExperiment();

-  const { mutateAsync: getModifiedPromptMutateAsync, data: modifiedPromptFn } =
+  const { mutateAsync: getModifiedPromptMutateAsync } =
     api.promptVariants.getModifiedPromptFn.useMutation();

   const [getModifiedPromptFn, modificationInProgress] = useHandledAsyncCallback(async () => {
     if (!experiment) return;

-    await getModifiedPromptMutateAsync({
+    const resp = await getModifiedPromptMutateAsync({
       id: variant.id,
+      originalPromptFn,
       newModel: selectedModel,
     });
+    if (maybeReportError(resp)) return;
+    setModifiedPromptFn(resp.payload);
     setConvertedModel(selectedModel);
   }, [getModifiedPromptMutateAsync, onClose, experiment, variant, selectedModel]);

-  const replaceVariantMutation = api.promptVariants.replaceVariant.useMutation();
-
-  const [replaceVariant, replacementInProgress] = useHandledAsyncCallback(async () => {
-    if (
-      !variant.experimentId ||
-      !modifiedPromptFn ||
-      (isObject(modifiedPromptFn) && "status" in modifiedPromptFn)
-    )
-      return;
-    await replaceVariantMutation.mutateAsync({
-      id: variant.id,
-      promptConstructor: modifiedPromptFn,
-      streamScenarios: visibleScenarios,
-    });
-    await utils.promptVariants.list.invalidate();
+  const replaceVariant = useCallback(() => {
+    if (!modifiedPromptFn) return;
+    editorOptionsMap[variant.uiId]?.setContent(modifiedPromptFn);
     onClose();
-  }, [replaceVariantMutation, variant, onClose, modifiedPromptFn]);
+  }, [variant.uiId, editorOptionsMap, onClose, modifiedPromptFn]);

   const originalLabel = modelLabel(variant.modelProvider, variant.model);
   const selectedLabel = modelLabel(selectedModel.provider, selectedModel.model);
@@ -130,9 +128,9 @@ export const ChangeModelModal = ({
           colorScheme="blue"
           onClick={replaceVariant}
           minW={24}
-          isDisabled={!convertedModel || modificationInProgress || replacementInProgress}
+          isDisabled={!convertedModel || modificationInProgress}
         >
-          {replacementInProgress ? <Spinner boxSize={4} /> : <Text>Accept</Text>}
+          Accept
         </Button>
       </HStack>
     </ModalFooter>
@@ -1,74 +1,41 @@
-import {
-  Button,
-  Icon,
-  AlertDialog,
-  AlertDialogBody,
-  AlertDialogFooter,
-  AlertDialogHeader,
-  AlertDialogContent,
-  AlertDialogOverlay,
-  useDisclosure,
-  Text,
-} from "@chakra-ui/react";
-
+import { Button, Icon, useDisclosure, Text } from "@chakra-ui/react";
 import { useRouter } from "next/router";
-import { useRef } from "react";
 import { BsTrash } from "react-icons/bs";

+import { useAppStore } from "~/state/store";
-import { api } from "~/utils/api";
 import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
+import DeleteExperimentDialog from "../experiments/DeleteExperimentDialog";

 export const DeleteButton = () => {
   const experiment = useExperiment();
-  const mutation = api.experiments.delete.useMutation();
-  const utils = api.useContext();
   const router = useRouter();

+  const disclosure = useDisclosure();
+
+  const closeDrawer = useAppStore((s) => s.closeDrawer);
+
-  const { isOpen, onOpen, onClose } = useDisclosure();
-  const cancelRef = useRef<HTMLButtonElement>(null);
-
-  const [onDeleteConfirm] = useHandledAsyncCallback(async () => {
-    if (!experiment.data?.id) return;
-    await mutation.mutateAsync({ id: experiment.data.id });
-    await utils.experiments.list.invalidate();
+  const [onDelete] = useHandledAsyncCallback(async () => {
     await router.push({ pathname: "/experiments" });
+    closeDrawer();
-
-    onClose();
-  }, [mutation, experiment.data?.id, router]);
+  }, [router, closeDrawer]);

   return (
     <>
-      <Button size="sm" variant="ghost" colorScheme="red" fontWeight="normal" onClick={onOpen}>
+      <Button
+        size="sm"
+        variant="ghost"
+        colorScheme="red"
+        fontWeight="normal"
+        onClick={disclosure.onOpen}
+      >
         <Icon as={BsTrash} boxSize={4} />
         <Text ml={2}>Delete Experiment</Text>
       </Button>

-      <AlertDialog isOpen={isOpen} leastDestructiveRef={cancelRef} onClose={onClose}>
-        <AlertDialogOverlay>
-          <AlertDialogContent>
-            <AlertDialogHeader fontSize="lg" fontWeight="bold">
-              Delete Experiment
-            </AlertDialogHeader>
-
-            <AlertDialogBody>
-              If you delete this experiment all the associated prompts and scenarios will be deleted
-              as well. Are you sure?
-            </AlertDialogBody>
-
-            <AlertDialogFooter>
-              <Button ref={cancelRef} onClick={onClose}>
-                Cancel
-              </Button>
-              <Button colorScheme="red" onClick={onDeleteConfirm} ml={3}>
-                Delete
-              </Button>
-            </AlertDialogFooter>
-          </AlertDialogContent>
-        </AlertDialogOverlay>
-      </AlertDialog>
+      <DeleteExperimentDialog
+        experimentId={experiment.data?.id}
+        onDelete={onDelete}
+        disclosure={disclosure}
+      />
     </>
   );
 };
@@ -10,7 +10,7 @@ import {
 } from "@chakra-ui/react";
 import { useCallback, useEffect, useRef, useState } from "react";
 import { FiMaximize, FiMinimize } from "react-icons/fi";
-import { editorBackground } from "~/state/sharedVariantEditor.slice";
+import { type CreatedEditor, editorBackground } from "~/state/sharedVariantEditor.slice";
 import { useAppStore } from "~/state/store";
 import { api } from "~/utils/api";
 import {
@@ -24,8 +24,10 @@ import { type PromptVariant } from "./types";
 export default function VariantEditor(props: { variant: PromptVariant }) {
   const { canModify } = useExperimentAccess();
   const monaco = useAppStore.use.sharedVariantEditor.monaco();
-  const editorRef = useRef<ReturnType<NonNullable<typeof monaco>["editor"]["create"]> | null>(null);
+  const updateOptionsForEditor = useAppStore.use.sharedVariantEditor.updateOptionsForEditor();
+  const editorRef = useRef<CreatedEditor | null>(null);
   const containerRef = useRef<HTMLDivElement | null>(null);
+  const lastSavedFnRef = useRef(props.variant.promptConstructor);
   const [editorId] = useState(() => `editor_${Math.random().toString(36).substring(7)}`);
   const [isChanged, setIsChanged] = useState(false);

@@ -48,22 +50,18 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
   }, [isFullscreen, toggleFullscreen]);

   const lastSavedFn = props.variant.promptConstructor;
+  useEffect(() => {
+    // Store in ref so that we can access it dynamically
+    lastSavedFnRef.current = lastSavedFn;
+  }, [lastSavedFn]);

   const modifierKey = useModifierKeyLabel();

   const checkForChanges = useCallback(() => {
     if (!editorRef.current) return;
     const currentFn = editorRef.current.getValue();
-    setIsChanged(currentFn.length > 0 && currentFn !== lastSavedFn);
-  }, [lastSavedFn]);
-
-  const matchUpdatedSavedFn = useCallback(() => {
-    if (!editorRef.current) return;
-    editorRef.current.setValue(lastSavedFn);
-    setIsChanged(false);
-  }, [lastSavedFn]);
-
-  useEffect(matchUpdatedSavedFn, [matchUpdatedSavedFn, lastSavedFn]);
+    setIsChanged(currentFn.length > 0 && currentFn !== lastSavedFnRef.current);
+  }, [editorRef]);

   const replaceVariant = api.promptVariants.replaceVariant.useMutation();
   const utils = api.useContext();
@@ -136,6 +134,11 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
       readOnly: !canModify,
     });

+    updateOptionsForEditor(props.variant.uiId, {
+      getContent: () => editorRef.current?.getValue() || "",
+      setContent: (content) => editorRef.current?.setValue(content),
+    });
+
     // Workaround because otherwise the commands only work on whatever
     // editor was loaded on the page last.
     // https://github.com/microsoft/monaco-editor/issues/2947#issuecomment-1422265201
@@ -155,7 +158,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
       });
     });

-    editorRef.current.onDidChangeModelContent(checkForChanges);
+    const checkForChangesListener = editorRef.current.onDidChangeModelContent(checkForChanges);

     const resizeObserver = new ResizeObserver(() => {
       editorRef.current?.layout();
@@ -164,6 +167,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {

     return () => {
       resizeObserver.disconnect();
+      checkForChangesListener.dispose();
       editorRef.current?.dispose();
     };
   }
@@ -171,7 +175,7 @@ export default function VariantEditor(props: { variant: PromptVariant }) {
   // We intentionally skip the onSave and props.savedConfig dependencies here because
   // we don't want to re-render the editor from scratch
   /* eslint-disable-next-line react-hooks/exhaustive-deps */
-  }, [monaco, editorId]);
+  }, [monaco, editorId, updateOptionsForEditor]);

   useEffect(() => {
     if (!editorRef.current) return;
@@ -1,3 +1,4 @@
+import { useState, useMemo, useCallback } from "react";
 import {
   Button,
   Modal,
@@ -9,22 +10,23 @@ import {
   ModalOverlay,
   VStack,
   Text,
   Spinner,
   HStack,
   Icon,
   SimpleGrid,
 } from "@chakra-ui/react";
 import { BsStars } from "react-icons/bs";
 import { api } from "~/utils/api";
-import { useHandledAsyncCallback, useVisibleScenarioIds } from "~/utils/hooks";
+import { useHandledAsyncCallback } from "~/utils/hooks";
 import { type PromptVariant } from "@prisma/client";
-import { useState } from "react";

 import CompareFunctions from "./CompareFunctions";
 import { CustomInstructionsInput } from "../CustomInstructionsInput";
 import { RefineAction } from "./RefineAction";
-import { isObject, isString } from "lodash-es";
+import { isString } from "lodash-es";
 import { type RefinementAction, type SupportedProvider } from "~/modelProviders/types";
 import frontendModelProviders from "~/modelProviders/frontendModelProviders";
+import { useAppStore } from "~/state/store";
+import { maybeReportError } from "~/utils/errorHandling/maybeReportError";

 export const RefinePromptModal = ({
   variant,
@@ -33,19 +35,23 @@ export const RefinePromptModal = ({
   variant: PromptVariant;
   onClose: () => void;
 }) => {
   const utils = api.useContext();
-  const visibleScenarios = useVisibleScenarioIds();
+  const editorOptionsMap = useAppStore((s) => s.sharedVariantEditor.editorOptionsMap);
+  const originalPromptFn = useMemo(
+    () => editorOptionsMap[variant.uiId]?.getContent() || "",
+    [editorOptionsMap, variant.uiId],
+  );

   const refinementActions =
     frontendModelProviders[variant.modelProvider as SupportedProvider].refinementActions || {};

-  const { mutateAsync: getModifiedPromptMutateAsync, data: refinedPromptFn } =
+  const { mutateAsync: getModifiedPromptMutateAsync } =
     api.promptVariants.getModifiedPromptFn.useMutation();
   const [instructions, setInstructions] = useState<string>("");

   const [activeRefineActionLabel, setActiveRefineActionLabel] = useState<string | undefined>(
     undefined,
   );
+  const [refinedPromptFn, setRefinedPromptFn] = useState<string>();

   const [getModifiedPromptFn, modificationInProgress] = useHandledAsyncCallback(
     async (label?: string) => {
@@ -54,31 +60,22 @@ export const RefinePromptModal = ({
         ? (refinementActions[label] as RefinementAction).instructions
         : instructions;
       setActiveRefineActionLabel(label);
-      await getModifiedPromptMutateAsync({
+      const resp = await getModifiedPromptMutateAsync({
         id: variant.id,
+        originalPromptFn,
         instructions: updatedInstructions,
       });
+      if (maybeReportError(resp)) return;
+      setRefinedPromptFn(resp.payload);
     },
     [getModifiedPromptMutateAsync, onClose, variant, instructions, setActiveRefineActionLabel],
   );

-  const replaceVariantMutation = api.promptVariants.replaceVariant.useMutation();
-
-  const [replaceVariant, replacementInProgress] = useHandledAsyncCallback(async () => {
-    if (
-      !variant.experimentId ||
-      !refinedPromptFn ||
-      (isObject(refinedPromptFn) && "status" in refinedPromptFn)
-    )
-      return;
-    await replaceVariantMutation.mutateAsync({
-      id: variant.id,
-      promptConstructor: refinedPromptFn,
-      streamScenarios: visibleScenarios,
-    });
-    await utils.promptVariants.list.invalidate();
+  const replaceVariant = useCallback(() => {
+    if (!refinedPromptFn) return;
+    editorOptionsMap[variant.uiId]?.setContent(refinedPromptFn);
     onClose();
-  }, [replaceVariantMutation, variant, onClose, refinedPromptFn]);
+  }, [variant.uiId, editorOptionsMap, onClose, refinedPromptFn]);

   return (
     <Modal
@@ -126,7 +123,7 @@ export const RefinePromptModal = ({
         />
       </VStack>
       <CompareFunctions
-        originalFunction={variant.promptConstructor}
+        originalFunction={originalPromptFn}
         newFunction={isString(refinedPromptFn) ? refinedPromptFn : undefined}
         maxH="40vh"
       />
@@ -139,9 +136,9 @@ export const RefinePromptModal = ({
           colorScheme="blue"
           onClick={replaceVariant}
           minW={24}
-          isDisabled={replacementInProgress || !refinedPromptFn}
+          isDisabled={!refinedPromptFn}
         >
-          {replacementInProgress ? <Spinner boxSize={4} /> : <Text>Accept</Text>}
+          Accept
         </Button>
       </HStack>
     </ModalFooter>
@@ -1,26 +0,0 @@
-import { VStack, HStack, type StackProps, Text, Divider } from "@chakra-ui/react";
-import Link, { type LinkProps } from "next/link";
-
-const StatsCard = ({
-  title,
-  href,
-  children,
-  ...rest
-}: { title: string; href: string } & StackProps & LinkProps) => {
-  return (
-    <VStack flex={1} borderWidth={1} padding={4} borderRadius={4} borderColor="gray.300" {...rest}>
-      <HStack w="full" justifyContent="space-between">
-        <Text fontSize="md" fontWeight="bold">
-          {title}
-        </Text>
-        <Link href={href}>
-          <Text color="blue">View all</Text>
-        </Link>
-      </HStack>
-      <Divider />
-      {children}
-    </VStack>
-  );
-};
-
-export default StatsCard;
@@ -2,11 +2,12 @@ import { Card, CardHeader, Heading, Table, Tbody, HStack, Button, Text } from "@
 import { useState } from "react";
 import Link from "next/link";
 import { useLoggedCalls } from "~/utils/hooks";
-import { TableHeader, TableRow } from "../requestLogs/TableRow";
+import { EmptyTableRow, TableHeader, TableRow } from "../requestLogs/TableRow";

 export default function LoggedCallsTable() {
+  const { data: loggedCalls } = useLoggedCalls(false);
+
   const [expandedRow, setExpandedRow] = useState<string | null>(null);
-  const { data: loggedCalls } = useLoggedCalls();

   return (
     <Card width="100%" overflow="hidden">
@@ -23,22 +24,26 @@ export default function LoggedCallsTable() {
       <Table>
         <TableHeader />
         <Tbody>
-          {loggedCalls?.calls.map((loggedCall) => {
-            return (
-              <TableRow
-                key={loggedCall.id}
-                loggedCall={loggedCall}
-                isExpanded={loggedCall.id === expandedRow}
-                onToggle={() => {
-                  if (loggedCall.id === expandedRow) {
-                    setExpandedRow(null);
-                  } else {
-                    setExpandedRow(loggedCall.id);
-                  }
-                }}
-              />
-            );
-          })}
+          {loggedCalls?.calls.length ? (
+            loggedCalls?.calls.map((loggedCall) => {
+              return (
+                <TableRow
+                  key={loggedCall.id}
+                  loggedCall={loggedCall}
+                  isExpanded={loggedCall.id === expandedRow}
+                  onToggle={() => {
+                    if (loggedCall.id === expandedRow) {
+                      setExpandedRow(null);
+                    } else {
+                      setExpandedRow(loggedCall.id);
+                    }
+                  }}
+                />
+              );
+            })
+          ) : (
+            <EmptyTableRow filtersApplied={false} />
+          )}
         </Tbody>
       </Table>
     </Card>
66  app/src/components/experiments/DeleteExperimentDialog.tsx  (Normal file)
@@ -0,0 +1,66 @@
+import { useRef } from "react";
+import {
+  type UseDisclosureReturn,
+  AlertDialog,
+  AlertDialogOverlay,
+  AlertDialogContent,
+  AlertDialogHeader,
+  AlertDialogBody,
+  AlertDialogFooter,
+  Button,
+} from "@chakra-ui/react";
+import { api } from "~/utils/api";
+
+import { useHandledAsyncCallback } from "~/utils/hooks";
+
+const DeleteExperimentDialog = ({
+  experimentId,
+  onDelete,
+  disclosure,
+}: {
+  experimentId?: string;
+  onDelete?: () => void;
+  disclosure: UseDisclosureReturn;
+}) => {
+  const cancelRef = useRef<HTMLButtonElement>(null);
+
+  const mutation = api.experiments.delete.useMutation();
+  const utils = api.useContext();
+
+  const [onDeleteConfirm] = useHandledAsyncCallback(async () => {
+    if (!experimentId) return;
+    await mutation.mutateAsync({ id: experimentId });
+    await utils.experiments.list.invalidate();
+    onDelete?.();
+
+    disclosure.onClose();
+  }, [mutation, experimentId, disclosure.onClose]);
+
+  return (
+    <AlertDialog leastDestructiveRef={cancelRef} {...disclosure}>
+      <AlertDialogOverlay>
+        <AlertDialogContent>
+          <AlertDialogHeader fontSize="lg" fontWeight="bold">
+            Delete Experiment
+          </AlertDialogHeader>
+
+          <AlertDialogBody>
+            If you delete this experiment all the associated prompts and scenarios will be deleted
+            as well. Are you sure?
+          </AlertDialogBody>
+
+          <AlertDialogFooter>
+            <Button ref={cancelRef} onClick={disclosure.onClose}>
+              Cancel
+            </Button>
+            <Button colorScheme="red" onClick={onDeleteConfirm} ml={3}>
+              Delete
+            </Button>
+          </AlertDialogFooter>
+        </AlertDialogContent>
+      </AlertDialogOverlay>
+    </AlertDialog>
+  );
+};
+
+export default DeleteExperimentDialog;
@@ -1,3 +1,4 @@
+import { type MouseEvent, useState } from "react";
 import {
   HStack,
   Icon,
@@ -8,17 +9,29 @@ import {
   AspectRatio,
   SkeletonText,
   Card,
+  useDisclosure,
   Box,
+  Menu,
+  MenuButton,
+  MenuList,
+  MenuItem,
+  IconButton,
+  useToast,
 } from "@chakra-ui/react";
 import { RiFlaskLine } from "react-icons/ri";
-import { formatTimePast } from "~/utils/dayjs";
 import Link from "next/link";
 import { useRouter } from "next/router";
-import { BsPlusSquare } from "react-icons/bs";
-import { RouterOutputs, api } from "~/utils/api";
+import { BsPlusSquare, BsThreeDotsVertical, BsLink45Deg, BsTrash } from "react-icons/bs";
+
+import { formatTimePast } from "~/utils/dayjs";
+import { type RouterOutputs, api } from "~/utils/api";
 import { useHandledAsyncCallback } from "~/utils/hooks";
+import { useAppStore } from "~/state/store";
+import DeleteExperimentDialog from "./DeleteExperimentDialog";

 export const ExperimentCard = ({ exp }: { exp: RouterOutputs["experiments"]["list"][0] }) => {
+  const [isMenuHovered, setIsMenuHovered] = useState(false);
+
   return (
     <Card
       w="full"
@@ -27,7 +40,7 @@ export const ExperimentCard = ({ exp }: { exp: RouterOutputs["experiments"]["lis
       p={4}
       bg="white"
       borderRadius={4}
-      _hover={{ bg: "gray.100" }}
+      _hover={{ bg: isMenuHovered ? undefined : "gray.100" }}
       transition="background 0.2s"
       aspectRatio={1.2}
     >
@@ -38,9 +51,17 @@ export const ExperimentCard = ({ exp }: { exp: RouterOutputs["experiments"]["lis
         href={{ pathname: "/experiments/[experimentSlug]", query: { experimentSlug: exp.slug } }}
         justify="space-between"
       >
-        <HStack w="full" color="gray.700" justify="center">
-          <Icon as={RiFlaskLine} boxSize={4} />
-          <Text fontWeight="bold">{exp.label}</Text>
+        <HStack w="full" justify="space-between" spacing={0}>
+          <Box w={6} />
+          <HStack color="gray.700" justify="center">
+            <Icon as={RiFlaskLine} boxSize={4} />
+            <Text fontWeight="bold">{exp.label}</Text>
+          </HStack>
+          <CardMenu
+            experimentId={exp.id}
+            experimentSlug={exp.slug}
+            setIsMenuHovered={setIsMenuHovered}
+          />
         </HStack>
         <HStack h="full" spacing={4} flex={1} align="center">
           <CountLabel label="Variants" count={exp.promptVariantCount} />
@@ -57,6 +78,75 @@ export const ExperimentCard = ({ exp }: { exp: RouterOutputs["experiments"]["lis
   );
 };

+const CardMenu = ({
+  experimentId,
+  experimentSlug,
+  setIsMenuHovered,
+}: {
+  experimentId: string;
+  experimentSlug: string;
+  setIsMenuHovered: (isHovered: boolean) => void;
+}) => {
+  const deleteDisclosure = useDisclosure();
+  const menuDisclosure = useDisclosure();
+  const toast = useToast();
+  const [copyShareLink] = useHandledAsyncCallback(
+    async (e: MouseEvent<HTMLButtonElement>) => {
+      if (typeof window === "undefined") return;
+      e.preventDefault();
+      e.stopPropagation();
+      const shareLink = `${window.location.origin}/experiments/${experimentSlug}`;
+      await navigator.clipboard.writeText(shareLink);
+      toast({
+        title: "Share link copied to clipboard",
+        status: "success",
+        duration: 2000,
+        isClosable: true,
+      });
+      menuDisclosure.onClose();
+    },
+    [toast, menuDisclosure.onClose, experimentSlug],
+  );
+  return (
+    <>
+      <Menu isLazy {...menuDisclosure}>
+        <MenuButton
+          as={IconButton}
+          aria-label="Options"
+          icon={<BsThreeDotsVertical />}
+          variant="ghost"
+          onClick={(e) => {
+            e.preventDefault();
+            e.stopPropagation();
+            menuDisclosure.onOpen();
+          }}
+          onMouseEnter={() => setIsMenuHovered(true)}
+          onMouseLeave={() => setIsMenuHovered(false)}
+          boxSize={6}
+          minW={0}
+        />
+        <MenuList>
+          <MenuItem icon={<Icon as={BsLink45Deg} boxSize={5} />} onClick={copyShareLink}>
+            Copy Link
+          </MenuItem>
+          <MenuItem
+            icon={<Icon as={BsTrash} boxSize={5} />}
+            onClick={(e) => {
+              e.preventDefault();
+              e.stopPropagation();
+              deleteDisclosure.onOpen();
+            }}
+            color="red.500"
+          >
+            Delete
+          </MenuItem>
+        </MenuList>
+      </Menu>
+      <DeleteExperimentDialog experimentId={experimentId} disclosure={deleteDisclosure} />
+    </>
+  );
+};
+
 const CountLabel = ({ label, count }: { label: string; count: number }) => {
   return (
     <VStack alignItems="center" flex={1}>
@@ -98,9 +188,7 @@ export const NewExperimentCard = () => {
       >
         <VStack align="center" justify="center" w="full" h="full" p={4} onClick={createExperiment}>
           <Icon as={isLoading ? Spinner : BsPlusSquare} boxSize={8} />
-          <Text display={{ base: "none", md: "block" }} ml={2}>
-            New Experiment
-          </Text>
+          <Text ml={2}>New Experiment</Text>
         </VStack>
       </Card>
     );
@@ -13,15 +13,18 @@
 } from "@chakra-ui/react";
 import Head from "next/head";
 import Link from "next/link";
+import { useRouter } from "next/router";
 import { BsGearFill, BsGithub, BsPersonCircle } from "react-icons/bs";
 import { IoStatsChartOutline } from "react-icons/io5";
 import { RiHome3Line, RiFlaskLine } from "react-icons/ri";
-import { FaRobot } from "react-icons/fa";
+import { AiOutlineThunderbolt } from "react-icons/ai";
+import { FaReadme } from "react-icons/fa";
 import { signIn, useSession } from "next-auth/react";

 import ProjectMenu from "./ProjectMenu";
 import NavSidebarOption from "./NavSidebarOption";
 import IconLink from "./IconLink";
-import { BetaModal } from "./BetaModal";
+import { BetaModal } from "../BetaModal";
 import { useAppStore } from "~/state/store";

 const Divider = () => <Box h="1px" bgColor="gray.300" w="full" />;
@@ -73,9 +76,9 @@ const NavSidebar = () => {
     <ProjectMenu />
     <Divider />

-    <IconLink icon={RiHome3Line} label="Dashboard" href="/dashboard" beta />
-    <IconLink icon={IoStatsChartOutline} label="Request Logs" href="/request-logs" beta />
-    <IconLink icon={FaRobot} label="Fine Tunes" href="/fine-tunes" beta />
+    <IconLink icon={RiHome3Line} label="Dashboard" href="/dashboard" />
+    <IconLink icon={IoStatsChartOutline} label="Request Logs" href="/request-logs" />
+    <IconLink icon={AiOutlineThunderbolt} label="Fine Tunes" href="/fine-tunes" beta />
     <IconLink icon={RiFlaskLine} label="Experiments" href="/experiments" />
     <VStack w="full" alignItems="flex-start" spacing={0} pt={8}>
       <Text
@@ -111,7 +114,22 @@ const NavSidebar = () => {
       </NavSidebarOption>
     )}
   </VStack>

+  <HStack
+    w="full"
+    px={{ base: 2, md: 4 }}
+    py={{ base: 1, md: 2 }}
+    as={ChakraLink}
+    justifyContent="start"
+    href="https://docs.openpipe.ai"
+    target="_blank"
+    color="gray.500"
+    spacing={1}
+  >
+    <Icon as={FaReadme} boxSize={4} mr={2} />
+    <Text fontWeight="bold" fontSize="sm">
+      Read the Docs
+    </Text>
+  </HStack>
   <Divider />
   <VStack spacing={0} align="center">
     <ChakraLink
@@ -140,6 +158,7 @@ export default function AppShell({
   requireBeta?: boolean;
 }) {
   const [vh, setVh] = useState("100vh"); // Default height to prevent flicker on initial render
+  const router = useRouter();

   useEffect(() => {
     const setHeight = () => {
@@ -181,7 +200,7 @@ export default function AppShell({
         {children}
       </Box>
     </Flex>
-    {requireBeta && flagsLoaded && !flags.betaAccess && <BetaModal />}
+    <BetaModal isOpen={!!requireBeta && flagsLoaded && !flags.betaAccess} onClose={router.back} />
   </>
 );
}
@@ -57,6 +57,7 @@ export default function ProjectMenu() {
     await utils.projects.list.invalidate();
     setSelectedProjectId(newProj.id);
     await router.push({ pathname: "/project/settings" });
+    popover.onClose();
   }, [createMutation, router]);

   const user = useSession().data;
@@ -1,29 +1,50 @@
+import { useState } from "react";
+
 import { Button, HStack, type ButtonProps, Icon, Text } from "@chakra-ui/react";
 import { type IconType } from "react-icons";
+import { useAppStore } from "~/state/store";
+import { BetaModal } from "../BetaModal";

 const ActionButton = ({
   icon,
   label,
+  requireBeta = false,
+  onClick,
   ...buttonProps
-}: { icon: IconType; label: string } & ButtonProps) => {
+}: {
+  icon: IconType;
+  label: string;
+  requireBeta?: boolean;
+  onClick?: () => void;
+} & ButtonProps) => {
+  const flags = useAppStore((s) => s.featureFlags.featureFlags);
+  const flagsLoaded = useAppStore((s) => s.featureFlags.flagsLoaded);
+
+  const [betaModalOpen, setBetaModalOpen] = useState(false);
+
+  const isBetaBlocked = requireBeta && flagsLoaded && !flags.betaAccess;
   return (
-    <Button
-      colorScheme="blue"
-      color="black"
-      bgColor="white"
-      borderColor="gray.300"
-      borderRadius={4}
-      variant="outline"
-      size="sm"
-      fontSize="sm"
-      fontWeight="normal"
-      {...buttonProps}
-    >
-      <HStack spacing={1}>
-        {icon && <Icon as={icon} />}
-        <Text display={{ base: "none", md: "flex" }}>{label}</Text>
-      </HStack>
-    </Button>
+    <>
+      <Button
+        colorScheme="blue"
+        color="black"
+        bgColor="white"
+        borderColor="gray.300"
+        borderRadius={4}
+        variant="outline"
+        size="sm"
+        fontSize="sm"
+        fontWeight="normal"
+        onClick={isBetaBlocked ? () => setBetaModalOpen(true) : onClick}
+        {...buttonProps}
+      >
+        <HStack spacing={1}>
+          {icon && <Icon as={icon} color={requireBeta ? "orange.400" : undefined} />}
+          <Text display={{ base: "none", md: "flex" }}>{label}</Text>
+        </HStack>
+      </Button>
+      <BetaModal isOpen={betaModalOpen} onClose={() => setBetaModalOpen(false)} />
+    </>
   );
 };
@@ -47,6 +47,7 @@ const ExportButton = () => {
         label="Export"
         icon={BiExport}
         isDisabled={selectedLogIds.size === 0}
+        requireBeta
       />
       <ExportLogsModal disclosure={disclosure} />
     </>
@@ -16,7 +16,7 @@ import {
   type UseDisclosureReturn,
   Input,
 } from "@chakra-ui/react";
-import { FaRobot } from "react-icons/fa";
+import { AiTwotoneThunderbolt } from "react-icons/ai";
 import humanId from "human-id";
 import { useRouter } from "next/router";

@@ -39,8 +39,9 @@ const FineTuneButton = () => {
       <ActionButton
         onClick={disclosure.onOpen}
         label="Fine Tune"
-        icon={FaRobot}
+        icon={AiTwotoneThunderbolt}
         isDisabled={selectedLogIds.size === 0}
+        requireBeta
       />
       <FineTuneModal disclosure={disclosure} />
     </>
@@ -90,7 +91,7 @@ const FineTuneModal = ({ disclosure }: { disclosure: UseDisclosureReturn }) => {
     <ModalContent w={1200}>
       <ModalHeader>
         <HStack>
-          <Icon as={FaRobot} />
+          <Icon as={AiTwotoneThunderbolt} />
           <Text>Fine Tune</Text>
         </HStack>
       </ModalHeader>
@@ -1,7 +1,7 @@
 import { Card, Table, Tbody } from "@chakra-ui/react";
 import { useState } from "react";
 import { useLoggedCalls } from "~/utils/hooks";
-import { TableHeader, TableRow } from "./TableRow";
+import { TableHeader, TableRow, EmptyTableRow } from "./TableRow";

 export default function LoggedCallsTable() {
   const [expandedRow, setExpandedRow] = useState<string | null>(null);
@@ -12,23 +12,27 @@ export default function LoggedCallsTable() {
     <Table>
       <TableHeader showOptions />
       <Tbody>
-        {loggedCalls?.calls?.map((loggedCall) => {
-          return (
-            <TableRow
-              key={loggedCall.id}
-              loggedCall={loggedCall}
-              isExpanded={loggedCall.id === expandedRow}
-              onToggle={() => {
-                if (loggedCall.id === expandedRow) {
-                  setExpandedRow(null);
-                } else {
-                  setExpandedRow(loggedCall.id);
-                }
-              }}
-              showOptions
-            />
-          );
-        })}
+        {loggedCalls?.calls.length ? (
+          loggedCalls?.calls?.map((loggedCall) => {
+            return (
+              <TableRow
+                key={loggedCall.id}
+                loggedCall={loggedCall}
+                isExpanded={loggedCall.id === expandedRow}
+                onToggle={() => {
+                  if (loggedCall.id === expandedRow) {
+                    setExpandedRow(null);
+                  } else {
+                    setExpandedRow(loggedCall.id);
+                  }
+                }}
+                showOptions
+              />
+            );
+          })
+        ) : (
+          <EmptyTableRow />
+        )}
       </Tbody>
     </Table>
   </Card>
@@ -13,6 +13,7 @@ import {
   ButtonGroup,
   Text,
   Checkbox,
+  Link as ChakraLink,
 } from "@chakra-ui/react";
 import Link from "next/link";

@@ -198,3 +199,41 @@ export const TableRow = ({
     </>
   );
 };
+
+export const EmptyTableRow = ({ filtersApplied = true }: { filtersApplied?: boolean }) => {
+  const visibleColumns = useAppStore((s) => s.columnVisibility.visibleColumns);
+  const filters = useAppStore((state) => state.logFilters.filters);
+  const { isLoading } = useLoggedCalls();
+
+  if (isLoading) return null;
+
+  if (filters.length && filtersApplied) {
+    return (
+      <Tr>
+        <Td w="full" colSpan={visibleColumns.size + 1}>
+          <Text color="gray.500" textAlign="center" w="full" p={4}>
+            No matching request logs found. Try removing some filters.
+          </Text>
+        </Td>
+      </Tr>
+    );
+  }
+
+  return (
+    <Tr>
+      <Td w="full" colSpan={visibleColumns.size + 1}>
+        <Text color="gray.500" textAlign="center" w="full" p={4}>
+          This project has no request logs. Learn how to add request logs to your project in our{" "}
+          <ChakraLink
+            href="https://docs.openpipe.ai/getting-started/quick-start"
+            target="_blank"
+            color="blue.600"
+          >
+            Quick Start
+          </ChakraLink>{" "}
+          guide.
+        </Text>
+      </Td>
+    </Tr>
+  );
+};
@@ -2,7 +2,7 @@
 import { isArray, isString } from "lodash-es";
 import { APIError } from "openai";
 import { type ChatCompletion, type CompletionCreateParams } from "openai/resources/chat";
-import mergeChunks from "openpipe/src/openai/mergeChunks";
+import mergeChunks from "openpipe/openai/mergeChunks";
 import { openai } from "~/server/utils/openai";
 import { type CompletionResponse } from "../types";
@@ -8,8 +8,8 @@ const replicate = new Replicate({
 });

 const modelIds: Record<ReplicateLlama2Input["model"], string> = {
-  "7b-chat": "7b0bfc9aff140d5b75bacbed23e91fd3c34b01a1e958d32132de6e0a19796e2c",
-  "13b-chat": "2a7f981751ec7fdf87b5b91ad4db53683a98082e9ff7bfd12c8cd5ea85980a52",
+  "7b-chat": "d24902e3fa9b698cc208b5e63136c4e26e828659a9f09827ca6ec5bb83014381",
+  "13b-chat": "9dff94b1bed5af738655d4a7cbcdcde2bd503aa85c94334fe1f42af7f3dd5ee3",
   "70b-chat": "2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1",
 };
@@ -33,7 +33,7 @@ export default function Dashboard() {
   );

   return (
-    <AppShell title="Dashboard" requireAuth requireBeta>
+    <AppShell title="Dashboard" requireAuth>
       <VStack px={8} py={8} alignItems="flex-start" spacing={4}>
         <Text fontSize="2xl" fontWeight="bold">
           Dashboard
@@ -19,7 +19,7 @@ export default function LoggedCalls() {
   const [filtersShown, setFiltersShown] = useState(true);

   return (
-    <AppShell title="Request Logs" requireAuth requireBeta>
+    <AppShell title="Request Logs" requireAuth>
       <Box h="100vh" overflowY="scroll">
         <VStack px={8} py={8} alignItems="flex-start" spacing={4} w="full">
           <Text fontSize="2xl" fontWeight="bold">
@@ -35,6 +35,7 @@ export default function LoggedCalls() {
             label="Experiment"
             icon={RiFlaskLine}
             isDisabled={selectedLogIds.size === 0}
+            requireBeta
           />
           <ExportButton />
           <ColumnVisiblityDropdown />
@@ -196,7 +196,10 @@ export const promptVariantsRouter = createTRPCRouter({
       ? `${originalVariant?.label} Copy`
       : `Prompt Variant ${largestSortIndex + 2}`;

-    const newConstructFn = await deriveNewConstructFn(originalVariant);
+    const newConstructFn = await deriveNewConstructFn(
+      originalVariant,
+      originalVariant?.promptConstructor,
+    );

     const createNewVariantAction = prisma.promptVariant.create({
       data: {
@@ -298,6 +301,7 @@ export const promptVariantsRouter = createTRPCRouter({
     .input(
       z.object({
         id: z.string(),
+        originalPromptFn: z.string(),
         instructions: z.string().optional(),
         newModel: z
           .object({
@@ -315,22 +319,21 @@ export const promptVariantsRouter = createTRPCRouter({
       });
       await requireCanModifyExperiment(existing.experimentId, ctx);

-      const constructedPrompt = await parsePromptConstructor(existing.promptConstructor);
-
-      if ("error" in constructedPrompt) {
-        return error(constructedPrompt.error);
-      }
-
       const model = input.newModel
         ? modelProviders[input.newModel.provider].models[input.newModel.model]
         : undefined;

-      const promptConstructionFn = await deriveNewConstructFn(existing, model, input.instructions);
+      const promptConstructionFn = await deriveNewConstructFn(
+        existing,
+        input.originalPromptFn,
+        model,
+        input.instructions,
+      );

       // TODO: Validate promptConstructionFn
       // TODO: Record in some sort of history

-      return promptConstructionFn;
+      return success(promptConstructionFn);
     }),

   replaceVariant: protectedProcedure
@@ -12,30 +12,37 @@ const isolate = new ivm.Isolate({ memoryLimit: 128 });

 export async function deriveNewConstructFn(
   originalVariant: PromptVariant | null,
+  originalPromptFn?: string,
   newModel?: Model,
   instructions?: string,
 ) {
-  if (originalVariant && !newModel && !instructions) {
-    return originalVariant.promptConstructor;
+  if (originalPromptFn && !newModel && !instructions) {
+    return originalPromptFn;
   }
-  if (originalVariant && (newModel || instructions)) {
-    return await requestUpdatedPromptFunction(originalVariant, newModel, instructions);
+  if (originalVariant && originalPromptFn && (newModel || instructions)) {
+    return await requestUpdatedPromptFunction(
+      originalVariant,
+      originalPromptFn,
+      newModel,
+      instructions,
+    );
   }
   return dedent`
-    prompt = {
-      model: "gpt-3.5-turbo",
-      messages: [
-        {
-          role: "system",
-          content: "Return 'Hello, world!'",
-        }
-      ]
-    }`;
+    definePrompt("openai/ChatCompletion", {
+      model: "gpt-3.5-turbo-0613",
+      messages: [
+        {
+          role: "system",
+          content: \`Hello, world!\`,
+        },
+      ],
+    });`;
 }

 const NUM_RETRIES = 5;
 const requestUpdatedPromptFunction = async (
   originalVariant: PromptVariant,
+  originalPromptFn: string,
   newModel?: Model,
   instructions?: string,
 ) => {
@@ -55,7 +62,7 @@ const requestUpdatedPromptFunction = async (
     },
     {
       role: "user",
-      content: `This is the current prompt constructor function:\n---\n${originalVariant.promptConstructor}`,
+      content: `This is the current prompt constructor function:\n---\n${originalPromptFn}`,
     },
   ];
   if (newModel) {
@@ -1,6 +1,6 @@
 import fs from "fs";
 import path from "path";
-import OpenAI, { type ClientOptions } from "openpipe/src/openai";
+import OpenAI, { type ClientOptions } from "openpipe/openai";

 import { env } from "~/env.mjs";
@@ -1,16 +1,26 @@
+import loader, { type Monaco } from "@monaco-editor/loader";
+
 import { type RouterOutputs } from "~/utils/api";
 import { type SliceCreator } from "./store";
-import loader from "@monaco-editor/loader";
 import formatPromptConstructor from "~/promptConstructor/format";

 export const editorBackground = "#fafafa";

+export type CreatedEditor = ReturnType<Monaco["editor"]["create"]>;
+
+type EditorOptions = {
+  getContent: () => string;
+  setContent: (content: string) => void;
+};
+
 export type SharedVariantEditorSlice = {
-  monaco: null | ReturnType<typeof loader.__getMonacoInstance>;
+  monaco: null | Monaco;
   loadMonaco: () => Promise<void>;
   scenarioVars: RouterOutputs["scenarioVars"]["list"];
   updateScenariosModel: () => void;
   setScenarioVars: (scenarioVars: RouterOutputs["scenarioVars"]["list"]) => void;
+  editorOptionsMap: Record<string, EditorOptions>;
+  updateOptionsForEditor: (uiId: string, { getContent, setContent }: EditorOptions) => void;
 };

 export const createVariantEditorSlice: SliceCreator<SharedVariantEditorSlice> = (set, get) => ({
@@ -93,4 +103,10 @@ export const createVariantEditorSlice: SliceCreator<SharedVariantEditorSlice> =
       );
     }
   },
+  editorOptionsMap: {},
+  updateOptionsForEditor: (uiId, options) => {
+    set((state) => {
+      state.sharedVariantEditor.editorOptionsMap[uiId] = options;
+    });
+  },
 });
@@ -148,13 +148,13 @@ export const useScenarioVars = () => {
   );
 };

-export const useLoggedCalls = () => {
+export const useLoggedCalls = (applyFilters = true) => {
   const selectedProjectId = useAppStore((state) => state.selectedProjectId);
   const { page, pageSize } = usePageParams();
   const filters = useAppStore((state) => state.logFilters.filters);

   const { data, isLoading, ...rest } = api.loggedCalls.list.useQuery(
-    { projectId: selectedProjectId ?? "", page, pageSize, filters },
+    { projectId: selectedProjectId ?? "", page, pageSize, filters: applyFilters ? filters : [] },
     { enabled: !!selectedProjectId },
   );
@@ -3,6 +3,7 @@
 This client allows you automatically report your OpenAI calls to [OpenPipe](https://openpipe.ai/). OpenPipe

 ## Installation
+
 `pip install openpipe`

 ## Usage
@@ -15,7 +16,7 @@ This client allows you automatically report your OpenAI calls to [OpenPipe](http
 from openpipe import openai, configure_openpipe
 import os

-# Set the OpenPipe API key you got in step (3) above.
+# Set the OpenPipe API key you got in step (2) above.
 # If you have the `OPENPIPE_API_KEY` environment variable set we'll read from it by default.
 configure_openpipe(api_key=os.getenv("OPENPIPE_API_KEY"))

@@ -23,7 +24,7 @@ configure_openpipe(api_key=os.getenv("OPENPIPE_API_KEY"))
 openai.api_key = os.getenv("OPENAI_API_KEY")
 ```

-You can use the OpenPipe client for normal
+You can now use your new OpenAI client, which functions identically to the generic OpenAI client while also reporting calls to your OpenPipe instance.

 ## Special Features
@@ -6,11 +6,9 @@ from openpipe.api_client.client import AuthenticatedClient
 from openpipe.api_client.models.report_json_body_tags import (
     ReportJsonBodyTags,
 )
-import toml
 import time
 import os
-
-version = toml.load("pyproject.toml")["tool"]["poetry"]["version"]
+import pkg_resources

 configured_client = AuthenticatedClient(
     base_url="https://app.openpipe.ai/api/v1", token=""
@@ -23,7 +21,7 @@ if os.environ.get("OPENPIPE_API_KEY"):
 def _get_tags(openpipe_options):
     tags = openpipe_options.get("tags") or {}
     tags["$sdk"] = "python"
-    tags["$sdk.version"] = version
+    tags["$sdk.version"] = pkg_resources.get_distribution('openpipe').version

     return ReportJsonBodyTags.from_dict(tags)
23
client-libs/python/poetry.lock
generated
@@ -1056,6 +1056,7 @@ files = [
     {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
     {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
     {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
     {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
     {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
     {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -1063,8 +1064,15 @@ files = [
     {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
     {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
     {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
     {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
     {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
     {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
     {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
     {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@@ -1081,6 +1089,7 @@ files = [
     {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
+    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
     {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
     {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
     {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
     {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
     {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@@ -1088,6 +1097,7 @@ files = [
     {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
+    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
     {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
     {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
     {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
     {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
     {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
@@ -1147,17 +1157,6 @@ files = [
     {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
 ]

-[[package]]
-name = "toml"
-version = "0.10.2"
-description = "Python Library for Tom's Obvious, Minimal Language"
-optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
-files = [
-    {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
-    {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
-]
-
 [[package]]
 name = "tomli"
 version = "2.0.1"
@@ -1367,4 +1366,4 @@ multidict = ">=4.0"
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.9"
-content-hash = "e93c2ecac1b81a4fc1f9ad3dcedf03b1126cc6815e084ae233da7d3ece313ade"
+content-hash = "f50c3ee43ebb9510bf42b9a16d8d6a92d561bec40e8f3c11fb2614e92a5b756f"
11 client-libs/python/publish.sh Normal file
@@ -0,0 +1,11 @@
#!/bin/bash
set -e

# Check if PYPI_OPENPIPE_TOKEN is set
if [[ -z "${PYPI_OPENPIPE_TOKEN}" ]]; then
  echo "Error: PYPI_OPENPIPE_TOKEN is not set."
  exit 1
fi

# If the token is set, proceed with publishing
poetry publish --build --username=__token__ --password="$PYPI_OPENPIPE_TOKEN"
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "openpipe"
-version = "3.0.1"
+version = "3.1.2"
 description = "Python client library for the OpenPipe service"
 authors = ["Kyle Corbitt <kyle@openpipe.ai>"]
 license = "Apache-2.0"
@@ -14,7 +14,6 @@ openai = "^0.27.8"
 httpx = "^0.24.1"
 attrs = "^23.1.0"
 python-dateutil = "^2.8.2"
-toml = "^0.10.2"

 [tool.poetry.dev-dependencies]
70 client-libs/typescript/README.md Normal file
@@ -0,0 +1,70 @@
# OpenPipe Node API Library

[](https://npmjs.org/package/openpipe)

This library wraps TypeScript or JavaScript OpenAI API calls and logs additional data to the configured `OPENPIPE_BASE_URL` for further processing.

It is fully compatible with OpenAI's SDK and logs both streaming and non-streaming requests and responses.

<!-- To learn more about using OpenPipe, check out our [Documentation](https://docs.openpipe.ai/docs/api). -->

## Installation

```sh
npm install --save openpipe
# or
yarn add openpipe
```

## Usage

1. Create a project at https://app.openpipe.ai
2. Find your project's API key at https://app.openpipe.ai/project/settings
3. Configure the OpenPipe client as shown below.

```js
// import OpenAI from 'openai'
import OpenAI from "openpipe/openai";

// Fully compatible with original OpenAI initialization
const openai = new OpenAI({
  apiKey: "my api key", // defaults to process.env["OPENAI_API_KEY"]
  // openpipe key is optional
  openpipe: {
    apiKey: "my api key", // defaults to process.env["OPENPIPE_API_KEY"]
    baseUrl: "my url", // defaults to process.env["OPENPIPE_BASE_URL"] or https://app.openpipe.ai/api/v1 if not set
  },
});

async function main() {
  // Allows optional openpipe object
  const completion = await openai.chat.completions.create({
    messages: [{ role: "user", content: "Say this is a test" }],
    model: "gpt-3.5-turbo",
    // optional
    openpipe: {
      // Add custom searchable tags
      tags: {
        prompt_id: "getCompletion",
        any_key: "any_value",
      },
    },
  });

  console.log(completion.choices);
}

main();
```

## FAQ

<b><i>How do I report calls to my self-hosted instance?</i></b>

Start an instance by following the instructions on [Running Locally](https://github.com/OpenPipe/OpenPipe#running-locally). Once it's running, point your `OPENPIPE_BASE_URL` to your self-hosted instance.
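
For example, assuming your self-hosted instance is reachable at `http://localhost:3000` (the address is an assumption; substitute your own deployment's URL), the configuration could look like this:

```typescript
import OpenAI from "openpipe/openai";

const openai = new OpenAI({
  apiKey: process.env["OPENAI_API_KEY"],
  openpipe: {
    apiKey: process.env["OPENPIPE_API_KEY"], // key from your self-hosted project settings
    baseUrl: "http://localhost:3000/api/v1", // assumed address of your self-hosted instance
  },
});
```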

<b><i>What if my `OPENPIPE_BASE_URL` is misconfigured or my instance goes down? Will my OpenAI calls stop working?</i></b>

Your OpenAI calls will continue to function as expected no matter what. The SDK handles logging errors gracefully without affecting OpenAI inference.
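
Internally this is just defensive, fire-and-forget reporting. A minimal sketch of the pattern (illustrative, not the SDK's exact code, though the legacy wrapper in this repo does essentially the same thing in a `finally` block):

```typescript
// Sketch: the caller always receives the real inference result (or error),
// while reporting runs fire-and-forget and can never reject the caller.
async function withReporting<T>(
  doInference: () => Promise<T>,
  report: () => Promise<void>,
): Promise<T> {
  try {
    return await doInference();
  } finally {
    report().catch((err) => console.error("Error reporting to OpenPipe", err));
  }
}
```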

See the [GitHub repo](https://github.com/OpenPipe/OpenPipe) for more details.
27 client-libs/typescript/build.sh Executable file
@@ -0,0 +1,27 @@
#!/usr/bin/env bash

# Adapted from https://github.com/openai/openai-node/blob/master/build

set -exuo pipefail

rm -rf dist /tmp/openpipe-build-dist

mkdir /tmp/openpipe-build-dist

cp -rp * /tmp/openpipe-build-dist

# Rename package name in package.json
python3 -c "
import json
with open('/tmp/openpipe-build-dist/package.json', 'r') as f:
    data = json.load(f)
data['name'] = 'openpipe'
with open('/tmp/openpipe-build-dist/package.json', 'w') as f:
    json.dump(data, f, indent=4)
"

rm -rf /tmp/openpipe-build-dist/node_modules
mv /tmp/openpipe-build-dist dist

# build to .js files
(cd dist && npm exec tsc -- --noEmit false)
@@ -1,3 +1 @@
-// main.ts or index.ts at the root level
-export * as OpenAI from "./src/openai";
-export * as OpenAILegacy from "./src/openai-legacy";
+export * as openai from "./openai";
@@ -80,6 +80,7 @@ test("bad call streaming", async () => {
       stream: true,
     });
   } catch (e) {
+    // @ts-expect-error need to check for error type
     await e.openpipe.reportingFinished;
     const lastLogged = await lastLoggedCall();
     expect(lastLogged?.modelResponse?.errorMessage).toEqual(
@@ -96,7 +97,9 @@ test("bad call", async () => {
       messages: [{ role: "system", content: "count to 10" }],
     });
   } catch (e) {
+    // @ts-expect-error need to check for error type
     assert("openpipe" in e);
+    // @ts-expect-error need to check for error type
     await e.openpipe.reportingFinished;
     const lastLogged = await lastLoggedCall();
     expect(lastLogged?.modelResponse?.errorMessage).toEqual(
@@ -120,7 +123,8 @@ test("caching", async () => {

   await completion.openpipe.reportingFinished;
   const firstLogged = await lastLoggedCall();
-  expect(completion.choices[0].message.content).toEqual(
+  expect(completion.choices[0]?.message.content).toEqual(
     firstLogged?.modelResponse?.respPayload.choices[0].message.content,
   );
@@ -1,14 +1,17 @@
 {
-  "name": "openpipe",
-  "version": "0.1.0",
+  "name": "openpipe-dev",
+  "version": "0.3.5",
   "type": "module",
   "description": "Metrics and auto-evaluation for LLM calls",
   "scripts": {
-    "build": "tsc",
+    "build": "./build.sh",
     "test": "vitest"
   },
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
+  "main": "./index.ts",
+  "publishConfig": {
+    "access": "public",
+    "main": "./index.js"
+  },
   "keywords": [],
   "author": "",
   "license": "Apache-2.0",
9 client-libs/typescript/publish.sh Executable file
@@ -0,0 +1,9 @@
#!/usr/bin/env bash

# Adapted from https://github.com/openai/openai-node/blob/master/build

set -exuo pipefail

./build.sh

(cd dist && pnpm publish --access public)
@@ -1,4 +1,5 @@
-import pkg from "../package.json";
+import pkg from "./package.json";

 import { DefaultService } from "./codegen";

 export type OpenPipeConfig = {
@@ -1,85 +0,0 @@
import * as openPipeClient from "../codegen";
import * as openai from "openai-legacy";
import { version } from "../../package.json";

// Anything we don't override we want to pass through to openai directly
export * as openAILegacy from "openai-legacy";

type OPConfigurationParameters = {
  apiKey?: string;
  basePath?: string;
};

export class Configuration extends openai.Configuration {
  public qkConfig?: openPipeClient.Configuration;

  constructor(
    config: openai.ConfigurationParameters & {
      opParameters?: OPConfigurationParameters;
    }
  ) {
    super(config);
    if (config.opParameters) {
      this.qkConfig = new openPipeClient.Configuration(config.opParameters);
    }
  }
}

type CreateChatCompletion = InstanceType<typeof openai.OpenAIApi>["createChatCompletion"];

export class OpenAIApi extends openai.OpenAIApi {
  public openPipeApi?: openPipeClient.DefaultApi;

  constructor(config: Configuration) {
    super(config);
    if (config.qkConfig) {
      this.openPipeApi = new openPipeClient.DefaultApi(config.qkConfig);
    }
  }

  public async createChatCompletion(
    createChatCompletionRequest: Parameters<CreateChatCompletion>[0],
    options?: Parameters<CreateChatCompletion>[1]
  ): ReturnType<CreateChatCompletion> {
    const requestedAt = Date.now();
    let resp: Awaited<ReturnType<CreateChatCompletion>> | null = null;
    let respPayload: openai.CreateChatCompletionResponse | null = null;
    let statusCode: number | undefined = undefined;
    let errorMessage: string | undefined;
    try {
      resp = await super.createChatCompletion(createChatCompletionRequest, options);
      respPayload = resp.data;
      statusCode = resp.status;
    } catch (err) {
      console.error("Error in createChatCompletion");
      if ("isAxiosError" in err && err.isAxiosError) {
        errorMessage = err.response?.data?.error?.message;
        respPayload = err.response?.data;
        statusCode = err.response?.status;
      } else if ("message" in err) {
        errorMessage = err.message.toString();
      }
      throw err;
    } finally {
      this.openPipeApi
        ?.externalApiReport({
          requestedAt,
          receivedAt: Date.now(),
          reqPayload: createChatCompletionRequest,
          respPayload: respPayload,
          statusCode: statusCode,
          errorMessage,
          tags: {
            client: "openai-js",
            clientVersion: version,
          },
        })
        .catch((err) => {
          console.error("Error reporting to OP", err);
        });
    }

    console.log("done");
    return resp;
  }
}
@@ -14,9 +14,12 @@
     "isolatedModules": true,
     "incremental": true,
     "noUncheckedIndexedAccess": true,
-    "baseUrl": ".",
-    "outDir": "dist"
+    "noEmit": true,
+    "sourceMap": true,
+    "declaration": true,
+    "declarationMap": true,
+    "rootDir": "."
   },
-  "include": ["src/**/*.ts"],
+  "include": ["**/*.ts"],
   "exclude": ["node_modules"]
 }
23 docs/faq/how-reporting-works.mdx Normal file
@@ -0,0 +1,23 @@
---
title: "How reporting works"
description: "Our SDK wraps calls and forwards requests"
---

### Does reporting calls add latency to streamed requests?

Streamed requests won't have any added latency. The SDK forwards each streamed token as it's received from the server while simultaneously collecting it in the response it will report to your OpenPipe instance once the entire response has been received.
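
As a rough sketch of that pattern (illustrative only, not the SDK's actual implementation; `reportToOpenPipe` is a hypothetical stand-in for the reporting client):

```typescript
// Forward each chunk to the caller immediately while buffering a copy,
// then report the assembled response only after the stream completes.
async function* forwardAndCollect(
  stream: AsyncIterable<string>,
  reportToOpenPipe: (fullResponse: string) => Promise<void>, // hypothetical
): AsyncGenerator<string> {
  const collected: string[] = [];
  for await (const chunk of stream) {
    collected.push(chunk); // buffer for the eventual report
    yield chunk; // forward right away, so no added latency
  }
  // Fire-and-forget: reporting failures must never affect inference.
  reportToOpenPipe(collected.join("")).catch(() => {});
}
```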

#### Your OpenAI key never leaves your machine.

Calls to OpenAI are carried out by our SDK **on your machine**, meaning that your API key is secure, and you'll continue getting uninterrupted inference even if your OpenPipe instance goes down.

## <br />

### Want to dig deeper? Take a peek at our open-source code.

We benefit from a growing community of developers and customers who are dedicated to improving the OpenPipe experience. Our [open source repo](https://github.com/openpipe/openpipe) is an opportunity for developers to confirm the quality of our offering and to make improvements when they can.
BIN docs/favicon.webp Normal file (490 B)
8 docs/features/experiments.mdx Normal file
@@ -0,0 +1,8 @@
---
title: "Experiments"
description: "
Template multiple scenarios into combinations of prompts and models to compare their output. Use flexible regex and GPT-4 evaluations to assess completion quality.
Quickly iterate and spot model shortcomings before deployment."
---

<Frame></Frame>
8 docs/features/exporting-data.mdx Normal file
@@ -0,0 +1,8 @@
---
title: "Export Data - Beta"
sidebarTitle: "Export Data"
description: "
Export your past requests as a JSONL file in an Alpaca or OpenAI fine-tuning format or in their raw form."
---

<Frame></Frame>
8 docs/features/fine-tuning.mdx Normal file
@@ -0,0 +1,8 @@
---
title: "Fine Tuning - Beta"
sidebarTitle: "Fine Tuning"
description: "
Fine-tune a model on specific logs. Filter by prompt id and exclude requests with undesirable output."
---

<Frame></Frame>
7 docs/features/log-filters.mdx Normal file
@@ -0,0 +1,7 @@
---
title: "Log Filters"
description: "
Search and filter your past LLM requests to inspect your responses and build a training dataset."
---

<Frame></Frame>
114 docs/getting-started/openpipe-sdk.mdx Normal file
@@ -0,0 +1,114 @@
---
title: "Installing the SDK"
---

Use the OpenPipe SDK as a drop-in replacement for the generic OpenAI package. We currently support logging OpenAI calls; support for more LLM providers will be added soon.

<Tabs>
<Tab title="Python">

Find the SDK at https://pypi.org/project/openpipe/

## Simple Integration

Add `OPENPIPE_API_KEY` to your environment variables.

```bash
export OPENPIPE_API_KEY=opk-<your-api-key>
# Or you can set it in your code, as shown in the example below
```

Replace this line

```python
import openai
```

with this one

```python
from openpipe import openai
```

## Adding Searchable Tags

OpenPipe has a concept of "tagging," which is very useful for grouping a certain set of completions together. When you're using a dataset for fine-tuning, you can select all the prompts that match a certain set of tags. Here's how you can use the tagging feature:

```python
from openpipe import openai, configure_openpipe
import os

# If you have the `OPENPIPE_API_KEY` environment variable set
# we'll read from it by default.
configure_openpipe(api_key=os.getenv("OPENPIPE_API_KEY"))

# Configure OpenAI the same way you would normally
openai.api_key = os.getenv("OPENAI_API_KEY")

completion = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "system", "content": "count to 10"}],
    openpipe={"tags": {"prompt_id": "counting", "any_key": "any_value"}},
)
```

</Tab>
<Tab title="NodeJS">

Find the SDK at https://www.npmjs.com/package/openpipe

## Simple Integration

Add `OPENPIPE_API_KEY` to your environment variables.

```bash
export OPENPIPE_API_KEY=opk-<your-api-key>
# Or you can set it in your code, as shown in the example below
```

Replace this line

```typescript
import OpenAI from "openai";
```

with this one

```typescript
import OpenAI from "openpipe/openai";
```

## Adding Searchable Tags

OpenPipe has a concept of "tagging," which is very useful for grouping a certain set of completions together. When you're using a dataset for fine-tuning, you can select all the prompts that match a certain set of tags. Here's how you can use the tagging feature:

```typescript
// Fully compatible with original OpenAI initialization
const openai = new OpenAI({
  apiKey: "my api key", // defaults to process.env["OPENAI_API_KEY"]
  // openpipe key is optional
  openpipe: {
    apiKey: "my api key", // defaults to process.env["OPENPIPE_API_KEY"]
    baseUrl: "my url", // defaults to process.env["OPENPIPE_BASE_URL"] or https://app.openpipe.ai/api/v1 if not set
  },
});

const completion = await openai.chat.completions.create({
  messages: [{ role: "user", content: "Count to 10" }],
  model: "gpt-3.5-turbo",
  // optional
  openpipe: {
    // Add custom searchable tags
    tags: {
      prompt_id: "counting",
      any_key: "any_value",
    },
  },
});
```

</Tab>
</Tabs>
35 docs/getting-started/quick-start.mdx Normal file
@@ -0,0 +1,35 @@
---
title: "Quick Start"
description: "Get started with OpenPipe in a few quick steps."
---

## Step 1: Create your OpenPipe Account

If you don't already have one, create an account with OpenPipe at https://app.openpipe.ai/. You can sign up with GitHub, so you don't need to remember an extra password.

## Step 2: Find your Project API key

In order to capture your calls and fine-tune a model on them, we need an API key to authenticate you and determine which project to store your logs under.

<Note>
  When you created your account, a project was automatically configured for you as well. Find its
  API key at https://app.openpipe.ai/project/settings.
</Note>

## Step 3: Integrate the OpenPipe SDK

You're done with the hard part! Learn how to integrate the OpenPipe SDK on the next page.

<CardGroup cols={2}>
  <Card
    title="OpenPipe SDK"
    icon={
      <svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
        <title>OpenPipe</title>
        <path d="M22.2819 9.8211a5.9847 5.9847 0 0 0-.5157-4.9108 6.0462 6.0462 0 0 0-6.5098-2.9A6.0651 6.0651 0 0 0 4.9807 4.1818a5.9847 5.9847 0 0 0-3.9977 2.9 6.0462 6.0462 0 0 0 .7427 7.0966 5.98 5.98 0 0 0 .511 4.9107 6.051 6.051 0 0 0 6.5146 2.9001A5.9847 5.9847 0 0 0 13.2599 24a6.0557 6.0557 0 0 0 5.7718-4.2058 5.9894 5.9894 0 0 0 3.9977-2.9001 6.0557 6.0557 0 0 0-.7475-7.0729zm-9.022 12.6081a4.4755 4.4755 0 0 1-2.8764-1.0408l.1419-.0804 4.7783-2.7582a.7948.7948 0 0 0 .3927-.6813v-6.7369l2.02 1.1686a.071.071 0 0 1 .038.052v5.5826a4.504 4.504 0 0 1-4.4945 4.4944zm-9.6607-4.1254a4.4708 4.4708 0 0 1-.5346-3.0137l.142.0852 4.783 2.7582a.7712.7712 0 0 0 .7806 0l5.8428-3.3685v2.3324a.0804.0804 0 0 1-.0332.0615L9.74 19.9502a4.4992 4.4992 0 0 1-6.1408-1.6464zM2.3408 7.8956a4.485 4.485 0 0 1 2.3655-1.9728V11.6a.7664.7664 0 0 0 .3879.6765l5.8144 3.3543-2.0201 1.1685a.0757.0757 0 0 1-.071 0l-4.8303-2.7865A4.504 4.504 0 0 1 2.3408 7.872zm16.5963 3.8558L13.1038 8.364 15.1192 7.2a.0757.0757 0 0 1 .071 0l4.8303 2.7913a4.4944 4.4944 0 0 1-.6765 8.1042v-5.6772a.79.79 0 0 0-.407-.667zm2.0107-3.0231l-.142-.0852-4.7735-2.7818a.7759.7759 0 0 0-.7854 0L9.409 9.2297V6.8974a.0662.0662 0 0 1 .0284-.0615l4.8303-2.7866a4.4992 4.4992 0 0 1 6.6802 4.66zM8.3065 12.863l-2.02-1.1638a.0804.0804 0 0 1-.038-.0567V6.0742a4.4992 4.4992 0 0 1 7.3757-3.4537l-.142.0805L8.704 5.459a.7948.7948 0 0 0-.3927.6813zm1.0976-2.3654l2.602-1.4998 2.6069 1.4998v2.9994l-2.5974 1.4997-2.6067-1.4997Z" />
      </svg>
    }
    iconType="duotone"
    href="/getting-started/openpipe-sdk"
  ></Card>
</CardGroup>
BIN docs/images/features/experiments.png Normal file (416 KiB)
BIN docs/images/features/exporting-data.png Normal file (414 KiB)
BIN docs/images/features/fine-tuning.png Normal file (404 KiB)
BIN docs/images/features/log-filters.png Normal file (321 KiB)
BIN docs/images/intro/request-logs.png Normal file (390 KiB)
18 docs/introduction.mdx Normal file
@@ -0,0 +1,18 @@
---
title: "OpenPipe Documentation"
sidebarTitle: "Introduction"
description: "
Product-focused teams use OpenPipe's seamless fine-tuning and monitoring services to decrease the cost and latency of their LLM operations.
You can use OpenPipe to collect and analyze LLM logs, create fine-tuned models, and compare output from multiple models given the same input."
---

<Frame></Frame>

<CardGroup cols={2}>
  <Card title="Get Started" icon="code">
    Quickly integrate the OpenPipe SDK into your application and start collecting data.
  </Card>
  <Card title="Features" icon="lightbulb">
    View the platform features OpenPipe provides and learn how to use them.
  </Card>
</CardGroup>
25 docs/logo/dark.svg Normal file (8.3 KiB)
25 docs/logo/light.svg Normal file (8.3 KiB)
65 docs/mint.json Normal file
@@ -0,0 +1,65 @@
{
  "name": "OpenPipe",
  "logo": {
    "light": "/logo/light.svg",
    "dark": "/logo/dark.svg"
  },
  "favicon": "/favicon.webp",
  "colors": {
    "primary": "#FF5733",
    "light": "#FF5733",
    "dark": "#FF5733"
  },
  "modeToggle": {
    "default": "light"
  },
  "topbarCtaButton": {
    "name": "Sign In",
    "url": "https://app.openpipe.ai"
  },
  "anchors": [
    {
      "name": "GitHub",
      "icon": "github",
      "url": "https://github.com/openpipe/openpipe"
    }
  ],
  "feedback": {
    "suggestEdit": true,
    "raiseIssue": true
  },
  "navigation": [
    {
      "group": "Welcome",
      "pages": ["introduction", "overview"]
    },
    {
      "group": "Getting Started",
      "pages": ["getting-started/quick-start", "getting-started/openpipe-sdk"]
    },
    {
      "group": "Features",
      "pages": [
        "features/log-filters",
        "features/exporting-data",
        "features/fine-tuning",
        "features/experiments"
      ]
    },
    {
      "group": "FAQ",
      "pages": ["faq/how-reporting-works"]
    }
  ],
  "topbarLinks": [
    {
      "name": "GitHub",
      "url": "https://github.com/OpenPipe/OpenPipe"
    }
  ],
  "footerSocials": {
    "twitter": "https://twitter.com/OpenPipeAI",
    "linkedin": "https://www.linkedin.com/company/openpipe/about/",
    "github": "https://github.com/OpenPipe/OpenPipe"
  }
}
30 docs/overview.mdx Normal file
@@ -0,0 +1,30 @@
---
title: "Overview"
description: "OpenPipe is a streamlined platform designed to help product-focused teams train specialized LLM models as replacements for slow and expensive prompts."
---

## Who We Are

We're a team of full-stack engineers and machine learning researchers working to streamline the process of integrating fine-tuned models into any application. Our goal is to make the fine-tuning process accessible to everyone.

## What We Provide

Here are a few of the features we offer:

- **Data Capture**: OpenPipe automatically captures every request and response sent through our drop-in replacement SDK and stores it for your future use.

- **Monitoring**: OpenPipe provides intuitive tools to view the frequency and cost of your LLM requests, and a special tool for viewing requests with error status codes.

- **Searchable Logs**: We enable you to search your past requests, and provide a simple protocol for tagging them by prompt id for easy filtering.

- **Fine-Tuning**: With all your LLM requests and responses in one place, it's easy to select the data you want to fine-tune on and kick off a job.

- **Model Hosting**: After we've trained your model, OpenPipe will automatically begin hosting it. Accessing your model will require an API key from your project.

- **Unified SDK**: Switching requests from your previous LLM provider to your new model is as simple as changing the model name (see the sketch after this list). All our models implement the OpenAI inference format, so you won't have to change how you parse their responses.

- **Data Export**: OpenPipe allows you to download your request logs or the fine-tuned models you've trained at any time for easy self-hosting.

- **Experimentation**: The fine-tunes you've created on OpenPipe are immediately available for you to run inference on in our experimentation playground.
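
A minimal sketch of the Unified SDK point above. The model identifier shown is hypothetical; the call shape follows the OpenAI format our SDK uses:

```typescript
import OpenAI from "openpipe/openai";

const openai = new OpenAI(); // reads OPENAI_API_KEY / OPENPIPE_API_KEY from the environment

const completion = await openai.chat.completions.create({
  // was: model: "gpt-3.5-turbo"
  model: "your-fine-tuned-model", // hypothetical id from your OpenPipe project
  messages: [{ role: "user", content: "Say this is a test" }],
});
```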

Welcome to the OpenPipe community!
@@ -1,123 +0,0 @@
# %% [markdown]
# I'm pretty happy with my model's accuracy relative to GPT-4. How does it compare cost-wise?
#
# I'll really push this to its limits -- let's see how quickly our poor model can classify the [full 2-million-recipe dataset](https://huggingface.co/datasets/corbt/all-recipes) 😈.

# %%

# %%
from datasets import load_dataset

all_recipes = load_dataset("corbt/all-recipes")["train"]["input"]

print(f"Number of recipes: {len(all_recipes):,}")


# %%
from vllm import LLM, SamplingParams

llm = LLM(model="./models/run1/merged", max_num_batched_tokens=4096)

sampling_params = SamplingParams(
    # 120 should be fine for the work we're doing here.
    max_tokens=120,
    # This is a deterministic task so temperature=0 is best.
    temperature=0,
)


# %%
import os
import time
import json

BATCH_SIZE = 10000
start_time = time.time()
print(f"Start time: {start_time}")

for i in range(0, len(all_recipes), BATCH_SIZE):
    # File name for the current batch
    file_name = f"./data/benchmark_batch_{int(i/BATCH_SIZE)}.txt"

    # Check if the file already exists; if so, skip to the next batch
    if os.path.exists(file_name):
        print(f"File {file_name} exists, skipping recipes {i:,} to {i+BATCH_SIZE:,}...")
        continue

    print(f"Processing recipes {i:,} to {i+BATCH_SIZE:,}...")
    outputs = llm.generate(
        all_recipes[i : i + BATCH_SIZE], sampling_params=sampling_params
    )

    outputs = [o.outputs[0].text for o in outputs]

    # Write the generated outputs to the file as a JSON list
    json.dump(outputs, open(file_name, "w"))

end_time = time.time()
print(f"End time: {end_time}")
print(f"Total hours: {((end_time - start_time) / 3600):.2f}")


# %% [markdown]
# Nice! I've processed all 2,147,248 recipes in under 17 hours. Let's do a cost comparison with GPT-3.5 and GPT-4. I'll use the GPT-4 latency/cost numbers based on the 5000 samples used to generate our model's training data.

# %%
import pandas as pd

# I used an on-demand Nvidia L40 on RunPod for this, at an hourly cost of $1.14.
finetuned_hourly_cost = 1.14

finetuned_total_hours = 17

finetuned_avg_cost = finetuned_hourly_cost * finetuned_total_hours / len(all_recipes)

# The average input and output tokens calculated by OpenAI, based on the 5000 recipes I sent them
avg_input_tokens = 276
avg_output_tokens = 42

# Token pricing from https://openai.com/pricing
gpt_4_avg_cost = avg_input_tokens * 0.03 / 1000 + avg_output_tokens * 0.06 / 1000

gpt_35_avg_cost = avg_input_tokens * 0.0015 / 1000 + avg_output_tokens * 0.0016 / 1000

gpt_35_finetuned_avg_cost = (
    avg_input_tokens * 0.012 / 1000 + avg_output_tokens * 0.016 / 1000 + 0.06 / 1000
)

# Multiply the number of recipes
# gpt_4_cost = len(all_recipes) * gpt_4_avg_cost
# gpt_35_cost = len(all_recipes) * gpt_35_avg_cost
# gpt_35_finetuned_cost = len(all_recipes) * gpt_35_finetuned_avg_cost

# Let's put this in a dataframe for easier comparison.

costs = pd.DataFrame(
    {
        "Model": [
            "Llama 2 7B (finetuned)",
            "GPT-3.5",
            "GPT-3.5 (finetuned)",
            "GPT-4",
        ],
        "Cost to Classify One Recipe": [
            finetuned_avg_cost,
            gpt_35_avg_cost,
            gpt_35_finetuned_avg_cost,
            gpt_4_avg_cost,
        ],
    }
)

costs["Cost to Classify Entire Dataset"] = (
    costs["Cost to Classify One Recipe"] * len(all_recipes)
).map(lambda x: f"{x:,.2f}")

costs


# %% [markdown]
# ...and just for fun, let's figure out how many recipes my pescatarian basement-dwelling brother can make! 😂

# %%
18 pnpm-lock.yaml generated
@@ -174,7 +174,10 @@ importers:
       specifier: 4.0.0-beta.7
       version: 4.0.0-beta.7(encoding@0.1.13)
     openpipe:
-      specifier: workspace:*
+      specifier: ^0.3.0
+      version: 0.3.0
+    openpipe-dev:
+      specifier: workspace:^
       version: link:../client-libs/typescript
     pg:
       specifier: ^8.11.2
@@ -7247,6 +7250,19 @@ packages:
     oidc-token-hash: 5.0.3
     dev: false

+  /openpipe@0.3.0:
+    resolution: {integrity: sha512-0hhk3Aq0kUxzvNb36vm9vssxMHYZvgJOg5wKeepRhVthW4ygBWftHZjR4PHyOtvjcRmnJ/v4h8xd/IINu5ypnQ==}
+    dependencies:
+      encoding: 0.1.13
+      form-data: 4.0.0
+      lodash-es: 4.17.21
+      node-fetch: 2.6.12(encoding@0.1.13)
+      openai-beta: /openai@4.0.0-beta.7(encoding@0.1.13)
+      openai-legacy: /openai@3.3.0
+    transitivePeerDependencies:
+      - debug
+    dev: false
+
   /optionator@0.9.3:
     resolution: {integrity: sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==}
     engines: {node: '>= 0.8.0'}