Compare commits

..

8 Commits

Author SHA1 Message Date
Kyle Corbitt
754e273049 Python package improvements
Added an endpoint for getting the actual stored responses, and used it to test and improve the Python package.
2023-08-14 19:07:03 -07:00
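A minimal sketch, distilled from the test changes later in this diff, of how the new endpoint is exercised from the Python package; it assumes a local OpenPipe instance on localhost:3000 and OPENAI_API_KEY / OPENPIPE_API_KEY set in .env:

import os

from dotenv import load_dotenv
from openpipe import openai, configure_openpipe, configured_client
from openpipe.api_client.api.default import local_testing_only_get_latest_logged_call

load_dotenv()
openai.api_key = os.getenv("OPENAI_API_KEY")
configure_openpipe(
    base_url="http://localhost:3000/api/v1", api_key=os.getenv("OPENPIPE_API_KEY")
)

completion = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "system", "content": "count to 3"}],
)

# Pull the most recently stored call back out and compare it to what OpenAI returned.
logged = local_testing_only_get_latest_logged_call.sync(client=configured_client)
assert (
    logged.model_response.resp_payload["choices"][0]["message"]["content"]
    == completion.choices[0].message.content
)
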
Kyle Corbitt
c4cef35717 Move the external API into its own router
Auth logic isn't shared between the clients anyway, so co-locating them is confusing since you can't use the same client to call both. This also makes the codegen clients less verbose.
2023-08-14 16:56:50 -07:00
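For reference, a hedged sketch of calling the relocated external API over plain HTTP, using httpx (which the generated client already depends on). The /api/v1 base URL, the /report path, and the bearer-style Authorization header come from the router and OpenAPI changes below; the payload values themselves are made up for illustration.

import time

import httpx

BASE_URL = "http://localhost:3000/api/v1"  # hosted equivalent: https://app.openpipe.ai/api/v1
API_KEY = "opk_..."  # a project API key (now generated with an opk_ prefix)

now_ms = int(time.time() * 1000)
resp = httpx.post(
    f"{BASE_URL}/report",
    headers={"Authorization": f"Bearer {API_KEY}"},
    json={
        "requestedAt": now_ms,
        "receivedAt": now_ms,
        "reqPayload": {
            "model": "gpt-3.5-turbo",
            "messages": [{"role": "user", "content": "hello"}],
        },
        "tags": {"promptId": "example"},
    },
)
resp.raise_for_status()
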
Kyle Corbitt
8552baf632 Merge pull request #154 from OpenPipe/broken-page
Cap the number of waiting messages we try to render
2023-08-14 15:47:48 -07:00
Kyle Corbitt
f41e2229ca Cap the number of waiting messages we try to render
If a cell was attempted several hours ago and never resolved, it crashes the UI because we try to render thousands of log messages once a second (e.g. https://app.openpipe.ai/experiments/372d0827-186e-4a7d-a8a6-1bf7050eb5fd). We should probably have a different UI for cells that have hung for a long time to let you know you should just retry, but this quick fix should work for now.
2023-08-14 15:44:03 -07:00
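The capped calculation introduced by this fix, restated as a small sketch: the 15-message cap comes from the OutputCell change below, and the one-second interval is assumed from the commit message's "once a second".

import math

WAITING_MESSAGE_INTERVAL_MS = 1000  # assumed: one waiting message per second
MAX_WAITING_MESSAGES = 15  # cap added by this fix

def num_waiting_messages(requested_at_ms: int, now_ms: int) -> int:
    # Number of "waiting" rows to render for a cell that is still pending.
    elapsed_ms = now_ms - requested_at_ms
    return min(math.floor(elapsed_ms / WAITING_MESSAGE_INTERVAL_MS), MAX_WAITING_MESSAGES)

# A cell that has been hanging for three hours renders 15 rows instead of ~10,800.
assert num_waiting_messages(0, 3 * 60 * 60 * 1000) == 15
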
arcticfly
e649f42c9c Await completions (#153)
* Continue polling stats while waiting for completions to finish

* Clarify convert to function call instructions
2023-08-14 13:03:48 -07:00
Kyle Corbitt
99f305483b Merge pull request #150 from OpenPipe/fix-build
(Probably) fixes the build
2023-08-14 07:59:20 -07:00
arcticfly
b28f4cad57 Remove scenarios header from output table card (#151) 2023-08-13 03:26:58 -07:00
Kyle Corbitt
df4a3a0950 (Probably) fixes the build
This probably fixes the build that I broke in https://github.com/OpenPipe/OpenPipe/pull/149. However, there's a small chance that it fixes it enough to deploy, but not enough to actually work. That would be bad, so not merging until I have time to monitor the deploy.
2023-08-12 23:50:31 -07:00
45 changed files with 1143 additions and 328 deletions

5
.dockerignore Normal file
View File

@@ -0,0 +1,5 @@
**/node_modules/
.git
**/.venv/
**/.env*
**/.next/

View File

@@ -32,5 +32,5 @@ NEXT_PUBLIC_HOST="http://localhost:3000"
GITHUB_CLIENT_ID="your_client_id"
GITHUB_CLIENT_SECRET="your_secret"
OPENPIPE_BASE_URL="http://localhost:3000/api"
OPENPIPE_BASE_URL="http://localhost:3000/api/v1"
OPENPIPE_API_KEY="your_key"

View File

@@ -12,12 +12,11 @@ declare module "nextjs-routes" {
export type Route =
| StaticRoute<"/account/signin">
| DynamicRoute<"/api/[...trpc]", { "trpc": string[] }>
| DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
| StaticRoute<"/api/experiments/og-image">
| StaticRoute<"/api/openapi">
| StaticRoute<"/api/sentry-example-api">
| DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>
| DynamicRoute<"/api/v1/[...trpc]", { "trpc": string[] }>
| StaticRoute<"/api/v1/openapi">
| StaticRoute<"/dashboard">
| DynamicRoute<"/data/[id]", { "id": string }>
| StaticRoute<"/data">

View File

@@ -6,13 +6,13 @@ RUN yarn global add pnpm
# DEPS
FROM base as deps
WORKDIR /app
WORKDIR /code
COPY prisma ./
COPY app/prisma app/package.json ./app/
COPY client-libs/typescript/package.json ./client-libs/typescript/
COPY pnpm-lock.yaml pnpm-workspace.yaml ./
COPY package.json pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile
RUN cd app && pnpm install --frozen-lockfile
# BUILDER
FROM base as builder
@@ -25,22 +25,24 @@ ARG NEXT_PUBLIC_SENTRY_DSN
ARG SENTRY_AUTH_TOKEN
ARG NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
WORKDIR /code
COPY --from=deps /code/node_modules ./node_modules
COPY --from=deps /code/app/node_modules ./app/node_modules
COPY --from=deps /code/client-libs/typescript/node_modules ./client-libs/typescript/node_modules
COPY . .
RUN SKIP_ENV_VALIDATION=1 pnpm build
RUN cd app && SKIP_ENV_VALIDATION=1 pnpm build
# RUNNER
FROM base as runner
WORKDIR /app
WORKDIR /code/app
ENV NODE_ENV production
ENV NEXT_TELEMETRY_DISABLED 1
COPY --from=builder /app/ ./
COPY --from=builder /code/ /code/
EXPOSE 3000
ENV PORT 3000
# Run the "run-prod.sh" script
CMD /app/run-prod.sh
CMD /code/app/run-prod.sh

View File

@@ -112,17 +112,17 @@ model ScenarioVariantCell {
model ModelResponse {
id String @id @default(uuid()) @db.Uuid
cacheKey String
requestedAt DateTime?
receivedAt DateTime?
respPayload Json?
cost Float?
inputTokens Int?
outputTokens Int?
statusCode Int?
errorMessage String?
retryTime DateTime?
outdated Boolean @default(false)
cacheKey String
requestedAt DateTime?
receivedAt DateTime?
respPayload Json?
cost Float?
inputTokens Int?
outputTokens Int?
statusCode Int?
errorMessage String?
retryTime DateTime?
outdated Boolean @default(false)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -273,8 +273,8 @@ model LoggedCall {
projectId String @db.Uuid
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
model String?
tags LoggedCallTag[]
model String?
tags LoggedCallTag[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -295,7 +295,7 @@ model LoggedCallModelResponse {
errorMessage String?
requestedAt DateTime
receivedAt DateTime
receivedAt DateTime
// Note: the function to calculate the cacheKey should include the project
// ID so we don't share cached responses between projects, which could be an
@@ -340,8 +340,8 @@ model ApiKey {
name String
apiKey String @unique
projectId String @db.Uuid
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String @db.Uuid
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt

View File

@@ -2,6 +2,7 @@ import { prisma } from "~/server/db";
import dedent from "dedent";
import { generateNewCell } from "~/server/utils/generateNewCell";
import { promptConstructorVersion } from "~/promptConstructor/version";
import { env } from "~/env.mjs";
const defaultId = "11111111-1111-1111-1111-111111111111";
@@ -16,6 +17,16 @@ const project =
data: { id: defaultId },
}));
if (env.OPENPIPE_API_KEY) {
await prisma.apiKey.create({
data: {
projectId: project.id,
name: "Default API Key",
apiKey: env.OPENPIPE_API_KEY,
},
});
}
await prisma.experiment.deleteMany({
where: {
id: defaultId,

View File

@@ -33,7 +33,7 @@ export default function OutputCell({
if (!templateHasVariables) disabledReason = "Add a value to the scenario variables to see output";
const [refetchInterval, setRefetchInterval] = useState(0);
const [refetchInterval, setRefetchInterval] = useState<number | false>(false);
const { data: cell, isLoading: queryLoading } = api.scenarioVariantCells.get.useQuery(
{ scenarioId: scenario.id, variantId: variant.id },
{ refetchInterval },
@@ -64,7 +64,8 @@ export default function OutputCell({
cell.retrievalStatus === "PENDING" ||
cell.retrievalStatus === "IN_PROGRESS" ||
hardRefetching;
useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : 0), [awaitingOutput]);
useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : false), [awaitingOutput]);
// TODO: disconnect from socket if we're not streaming anymore
const streamedMessage = useSocket<OutputSchema>(cell?.id);
@@ -120,8 +121,13 @@ export default function OutputCell({
? response.receivedAt.getTime()
: Date.now();
if (response.requestedAt) {
numWaitingMessages = Math.floor(
(relativeWaitingTime - response.requestedAt.getTime()) / WAITING_MESSAGE_INTERVAL,
numWaitingMessages = Math.min(
Math.floor(
(relativeWaitingTime - response.requestedAt.getTime()) / WAITING_MESSAGE_INTERVAL,
),
// Don't try to render more than 15, it'll use too much CPU and
// break the page
15,
);
}
return (

View File

@@ -21,14 +21,18 @@ export default function VariantStats(props: { variant: PromptVariant }) {
outputTokens: 0,
scenarioCount: 0,
outputCount: 0,
awaitingCompletions: false,
awaitingEvals: false,
},
refetchInterval,
},
);
// Poll every two seconds while we are waiting for LLM retrievals to finish
useEffect(() => setRefetchInterval(data.awaitingEvals ? 5000 : 0), [data.awaitingEvals]);
// Poll every five seconds while we are waiting for LLM retrievals to finish
useEffect(
() => setRefetchInterval(data.awaitingCompletions || data.awaitingEvals ? 5000 : 0),
[data.awaitingCompletions, data.awaitingEvals],
);
const [passColor, neutralColor, failColor] = useToken("colors", [
"green.500",

View File

@@ -120,9 +120,9 @@ export const refinementActions: Record<string, RefinementAction> = {
"Convert to function call": {
icon: TfiThought,
description: "Use function calls to get output from the model in a more structured way.",
instructions: `OpenAI functions are a specialized way for an LLM to return output.
instructions: `OpenAI functions are a specialized way for an LLM to return its final output.
This is what a prompt looks like before adding a function:
Example 1 before:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
@@ -139,7 +139,7 @@ export const refinementActions: Record<string, RefinementAction> = {
],
});
This is what one looks like after adding a function:
Example 1 after:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
@@ -156,7 +156,7 @@ export const refinementActions: Record<string, RefinementAction> = {
],
functions: [
{
name: "extract_sentiment",
name: "log_extracted_sentiment",
parameters: {
type: "object", // parameters must always be an object with a properties key
properties: { // properties key is required
@@ -169,13 +169,13 @@ export const refinementActions: Record<string, RefinementAction> = {
},
],
function_call: {
name: "extract_sentiment",
name: "log_extracted_sentiment",
},
});
Here's another example of adding a function:
Before:
=========
Example 2 before:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
@@ -197,7 +197,7 @@ export const refinementActions: Record<string, RefinementAction> = {
temperature: 0,
});
After:
Example 2 after:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
@@ -215,7 +215,7 @@ export const refinementActions: Record<string, RefinementAction> = {
temperature: 0,
functions: [
{
name: "score_post",
name: "log_post_score",
parameters: {
type: "object",
properties: {
@@ -227,13 +227,13 @@ export const refinementActions: Record<string, RefinementAction> = {
},
],
function_call: {
name: "score_post",
name: "log_post_score",
},
});
Another example
=========
Before:
Example 3 before:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
@@ -246,7 +246,7 @@ export const refinementActions: Record<string, RefinementAction> = {
],
});
After:
Example 3 after:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
@@ -258,21 +258,24 @@ export const refinementActions: Record<string, RefinementAction> = {
],
functions: [
{
name: "write_in_language",
name: "log_translated_text",
parameters: {
type: "object",
properties: {
text: {
translated_text: {
type: "string",
description: "The text, written in the language specified in the prompt",
},
},
},
},
],
function_call: {
name: "write_in_language",
name: "log_translated_text",
},
});
=========
Add an OpenAI function that takes one or more nested parameters that match the expected output from this prompt.`,
},

View File

@@ -1,6 +0,0 @@
// A faulty API route to test Sentry's error monitoring
// @ts-expect-error just a test file, don't care about types
export default function handler(_req, res) {
throw new Error("Sentry Example API Route Error");
res.status(200).json({ name: "John Doe" });
}

View File

@@ -1,17 +1,14 @@
import { type NextApiRequest, type NextApiResponse } from "next";
import cors from "nextjs-cors";
import { createOpenApiNextHandler } from "trpc-openapi";
import { createProcedureCache } from "trpc-openapi/dist/adapters/node-http/procedures";
import { appRouter } from "~/server/api/root.router";
import { createTRPCContext } from "~/server/api/trpc";
import { v1ApiRouter } from "~/server/api/external/v1Api.router";
import { createOpenApiContext } from "~/server/api/external/openApiTrpc";
const openApiHandler = createOpenApiNextHandler({
router: appRouter,
createContext: createTRPCContext,
router: v1ApiRouter,
createContext: createOpenApiContext,
});
const cache = createProcedureCache(appRouter);
const handler = async (req: NextApiRequest, res: NextApiResponse) => {
// Setup CORS
await cors(req, res);

View File

@@ -1,12 +1,12 @@
import { type NextApiRequest, type NextApiResponse } from "next";
import { generateOpenApiDocument } from "trpc-openapi";
import { appRouter } from "~/server/api/root.router";
import { v1ApiRouter } from "~/server/api/external/v1Api.router";
export const openApiDocument = generateOpenApiDocument(appRouter, {
export const openApiDocument = generateOpenApiDocument(v1ApiRouter, {
title: "OpenPipe API",
description: "The public API for reporting API calls to OpenPipe",
version: "0.1.0",
baseUrl: "https://app.openpipe.ai/api",
version: "0.1.1",
baseUrl: "https://app.openpipe.ai/api/v1",
});
// Respond with our OpenAPI schema
const hander = (req: NextApiRequest, res: NextApiResponse) => {

View File

@@ -0,0 +1,95 @@
import type { ApiKey, Project } from "@prisma/client";
import { TRPCError, initTRPC } from "@trpc/server";
import { type CreateNextContextOptions } from "@trpc/server/adapters/next";
import superjson from "superjson";
import { type OpenApiMeta } from "trpc-openapi";
import { ZodError } from "zod";
import { prisma } from "~/server/db";
type CreateContextOptions = {
key:
| (ApiKey & {
project: Project;
})
| null;
};
/**
* This helper generates the "internals" for a tRPC context. If you need to use it, you can export
* it from here.
*
* Examples of things you may need it for:
* - testing, so we don't have to mock Next.js' req/res
* - tRPC's `createSSGHelpers`, where we don't have req/res
*
* @see https://create.t3.gg/en/usage/trpc#-serverapitrpcts
*/
export const createInnerTRPCContext = (opts: CreateContextOptions) => {
return {
key: opts.key,
};
};
export const createOpenApiContext = async (opts: CreateNextContextOptions) => {
const { req, res } = opts;
const apiKey = req.headers.authorization?.split(" ")[1] as string | null;
if (!apiKey) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
const key = await prisma.apiKey.findUnique({
where: { apiKey },
include: { project: true },
});
if (!key) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
return createInnerTRPCContext({
key,
});
};
export type TRPCContext = Awaited<ReturnType<typeof createOpenApiContext>>;
const t = initTRPC
.context<typeof createOpenApiContext>()
.meta<OpenApiMeta>()
.create({
transformer: superjson,
errorFormatter({ shape, error }) {
return {
...shape,
data: {
...shape.data,
zodError: error.cause instanceof ZodError ? error.cause.flatten() : null,
},
};
},
});
export const createOpenApiRouter = t.router;
export const openApiPublicProc = t.procedure;
/** Reusable middleware that enforces a valid API key before running the procedure. */
const enforceApiKey = t.middleware(async ({ ctx, next }) => {
if (!ctx.key) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
return next({
ctx: { key: ctx.key },
});
});
/**
* Protected (authenticated) procedure
*
* If you want a query or mutation to ONLY be accessible to callers with a valid API key, use this.
* It verifies the API key exists and guarantees `ctx.key` is not null.
*
* @see https://trpc.io/docs/procedures
*/
export const openApiProtectedProc = t.procedure.use(enforceApiKey);

View File

@@ -2,9 +2,6 @@ import { type Prisma } from "@prisma/client";
import { type JsonValue } from "type-fest";
import { z } from "zod";
import { v4 as uuidv4 } from "uuid";
import { TRPCError } from "@trpc/server";
import { createTRPCRouter, publicProcedure } from "~/server/api/trpc";
import { prisma } from "~/server/db";
import { hashRequest } from "~/server/utils/hashObject";
import modelProvider from "~/modelProviders/openai-ChatCompletion";
@@ -12,6 +9,7 @@ import {
type ChatCompletion,
type CompletionCreateParams,
} from "openai/resources/chat/completions";
import { createOpenApiRouter, openApiProtectedProc } from "./openApiTrpc";
const reqValidator = z.object({
model: z.string(),
@@ -28,12 +26,12 @@ const respValidator = z.object({
),
});
export const externalApiRouter = createTRPCRouter({
checkCache: publicProcedure
export const v1ApiRouter = createOpenApiRouter({
checkCache: openApiProtectedProc
.meta({
openapi: {
method: "POST",
path: "/v1/check-cache",
path: "/check-cache",
description: "Check if a prompt is cached",
protect: true,
},
@@ -47,7 +45,8 @@ export const externalApiRouter = createTRPCRouter({
.optional()
.describe(
'Extra tags to attach to the call for filtering. Eg { "userId": "123", "promptId": "populate-title" }',
),
)
.default({}),
}),
)
.output(
@@ -56,18 +55,8 @@ export const externalApiRouter = createTRPCRouter({
}),
)
.mutation(async ({ input, ctx }) => {
const apiKey = ctx.apiKey;
if (!apiKey) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
const key = await prisma.apiKey.findUnique({
where: { apiKey },
});
if (!key) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
const reqPayload = await reqValidator.spa(input.reqPayload);
const cacheKey = hashRequest(key.projectId, reqPayload as JsonValue);
const cacheKey = hashRequest(ctx.key.projectId, reqPayload as JsonValue);
const existingResponse = await prisma.loggedCallModelResponse.findFirst({
where: { cacheKey },
@@ -79,23 +68,24 @@ export const externalApiRouter = createTRPCRouter({
await prisma.loggedCall.create({
data: {
projectId: key.projectId,
projectId: ctx.key.projectId,
requestedAt: new Date(input.requestedAt),
cacheHit: true,
modelResponseId: existingResponse.id,
},
});
await createTags(existingResponse.originalLoggedCallId, input.tags);
return {
respPayload: existingResponse.respPayload,
};
}),
report: publicProcedure
report: openApiProtectedProc
.meta({
openapi: {
method: "POST",
path: "/v1/report",
path: "/report",
description: "Report an API call",
protect: true,
},
@@ -113,26 +103,16 @@ export const externalApiRouter = createTRPCRouter({
.optional()
.describe(
'Extra tags to attach to the call for filtering. Eg { "userId": "123", "promptId": "populate-title" }',
),
)
.default({}),
}),
)
.output(z.void())
.mutation(async ({ input, ctx }) => {
console.log("GOT TAGS", input.tags);
const apiKey = ctx.apiKey;
if (!apiKey) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
const key = await prisma.apiKey.findUnique({
where: { apiKey },
});
if (!key) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
const reqPayload = await reqValidator.spa(input.reqPayload);
const respPayload = await respValidator.spa(input.respPayload);
const requestHash = hashRequest(key.projectId, reqPayload as JsonValue);
const requestHash = hashRequest(ctx.key.projectId, reqPayload as JsonValue);
const newLoggedCallId = uuidv4();
const newModelResponseId = uuidv4();
@@ -151,7 +131,7 @@ export const externalApiRouter = createTRPCRouter({
prisma.loggedCall.create({
data: {
id: newLoggedCallId,
projectId: key.projectId,
projectId: ctx.key.projectId,
requestedAt: new Date(input.requestedAt),
cacheHit: false,
model,
@@ -185,14 +165,76 @@ export const externalApiRouter = createTRPCRouter({
}),
]);
const tagsToCreate = Object.entries(input.tags ?? {}).map(([name, value]) => ({
loggedCallId: newLoggedCallId,
// sanitize tags
name: name.replaceAll(/[^a-zA-Z0-9_]/g, "_"),
value,
}));
await prisma.loggedCallTag.createMany({
data: tagsToCreate,
await createTags(newLoggedCallId, input.tags);
}),
localTestingOnlyGetLatestLoggedCall: openApiProtectedProc
.meta({
openapi: {
method: "GET",
path: "/local-testing-only-get-latest-logged-call",
description: "Get the latest logged call (only for local testing)",
protect: true, // Make sure to protect this endpoint
},
})
.input(z.void())
.output(
z
.object({
createdAt: z.date(),
cacheHit: z.boolean(),
tags: z.record(z.string().nullable()),
modelResponse: z
.object({
id: z.string(),
statusCode: z.number().nullable(),
errorMessage: z.string().nullable(),
reqPayload: z.unknown(),
respPayload: z.unknown(),
})
.nullable(),
})
.nullable(),
)
.mutation(async ({ ctx }) => {
if (process.env.NODE_ENV === "production") {
throw new Error("This operation is not allowed in production environment");
}
const latestLoggedCall = await prisma.loggedCall.findFirst({
where: { projectId: ctx.key.projectId },
orderBy: { requestedAt: "desc" },
select: {
createdAt: true,
cacheHit: true,
tags: true,
modelResponse: {
select: {
id: true,
statusCode: true,
errorMessage: true,
reqPayload: true,
respPayload: true,
},
},
},
});
return (
latestLoggedCall && {
...latestLoggedCall,
tags: Object.fromEntries(latestLoggedCall.tags.map((tag) => [tag.name, tag.value])),
}
);
}),
});
async function createTags(loggedCallId: string, tags: Record<string, string>) {
const tagsToCreate = Object.entries(tags).map(([name, value]) => ({
loggedCallId,
name: name.replaceAll(/[^a-zA-Z0-9_$]/g, "_"),
value,
}));
await prisma.loggedCallTag.createMany({
data: tagsToCreate,
});
}
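
The new createTags helper also normalizes tag names before storing them; an equivalent sketch of that sanitization rule (any character outside a-z, A-Z, 0-9, _ and $ becomes an underscore):

import re

def sanitize_tag_name(name: str) -> str:
    # Mirrors the replaceAll(/[^a-zA-Z0-9_$]/g, "_") call in createTags above.
    return re.sub(r"[^a-zA-Z0-9_$]", "_", name)

assert sanitize_tag_name("prompt-id") == "prompt_id"
assert sanitize_tag_name("$sdk_version") == "$sdk_version"  # SDK-reserved tags pass through unchanged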

View File

@@ -8,7 +8,6 @@ import { evaluationsRouter } from "./routers/evaluations.router";
import { worldChampsRouter } from "./routers/worldChamps.router";
import { datasetsRouter } from "./routers/datasets.router";
import { datasetEntries } from "./routers/datasetEntries.router";
import { externalApiRouter } from "./routers/externalApi.router";
import { projectsRouter } from "./routers/projects.router";
import { dashboardRouter } from "./routers/dashboard.router";
import { loggedCallsRouter } from "./routers/loggedCalls.router";
@@ -31,7 +30,6 @@ export const appRouter = createTRPCRouter({
projects: projectsRouter,
dashboard: dashboardRouter,
loggedCalls: loggedCallsRouter,
externalApi: externalApiRouter,
});
// export type definition of API

View File

@@ -131,6 +131,8 @@ export const promptVariantsRouter = createTRPCRouter({
const inputTokens = overallTokens._sum?.inputTokens ?? 0;
const outputTokens = overallTokens._sum?.outputTokens ?? 0;
const awaitingCompletions = outputCount < scenarioCount;
const awaitingEvals = !!evalResults.find(
(result) => result.totalCount < scenarioCount * evals.length,
);
@@ -142,6 +144,7 @@ export const promptVariantsRouter = createTRPCRouter({
overallCost: overallTokens._sum?.cost ?? 0,
scenarioCount,
outputCount,
awaitingCompletions,
awaitingEvals,
};
}),

View File

@@ -27,7 +27,6 @@ import { capturePath } from "~/utils/analytics/serverAnalytics";
type CreateContextOptions = {
session: Session | null;
apiKey: string | null;
};
// eslint-disable-next-line @typescript-eslint/no-empty-function
@@ -46,7 +45,6 @@ const noOp = () => {};
export const createInnerTRPCContext = (opts: CreateContextOptions) => {
return {
session: opts.session,
apiKey: opts.apiKey,
prisma,
markAccessControlRun: noOp,
};
@@ -64,11 +62,8 @@ export const createTRPCContext = async (opts: CreateNextContextOptions) => {
// Get the session from the server using the getServerSession wrapper function
const session = await getServerAuthSession({ req, res });
const apiKey = req.headers.authorization?.split(" ")[1] as string | null;
return createInnerTRPCContext({
session,
apiKey,
});
};

View File

@@ -1,5 +1,5 @@
import "dotenv/config";
import { openApiDocument } from "~/pages/api/openapi.json";
import { openApiDocument } from "~/pages/api/v1/openapi.json";
import fs from "fs";
import path from "path";
import { execSync } from "child_process";

View File

@@ -51,7 +51,7 @@ const requestUpdatedPromptFunction = async (
originalModelProvider.inputSchema,
null,
2,
)}\n\nDo not add any assistant messages.`,
)}`,
},
{
role: "user",

View File

@@ -2,4 +2,4 @@ import cryptoRandomString from "crypto-random-string";
const KEY_LENGTH = 42;
export const generateApiKey = () => `opc_${cryptoRandomString({ length: KEY_LENGTH })}`;
export const generateApiKey = () => `opk_${cryptoRandomString({ length: KEY_LENGTH })}`;

9
app/test-docker.sh Executable file
View File

@@ -0,0 +1,9 @@
#! /bin/bash
set -e
cd "$(dirname "$0")/.."
source app/.env
docker build . --file app/Dockerfile

View File

@@ -3,17 +3,17 @@
"info": {
"title": "OpenPipe API",
"description": "The public API for reporting API calls to OpenPipe",
"version": "0.1.0"
"version": "0.1.1"
},
"servers": [
{
"url": "https://app.openpipe.ai/api"
"url": "https://app.openpipe.ai/api/v1"
}
],
"paths": {
"/v1/check-cache": {
"/check-cache": {
"post": {
"operationId": "externalApi-checkCache",
"operationId": "checkCache",
"description": "Check if a prompt is cached",
"security": [
{
@@ -39,7 +39,8 @@
"additionalProperties": {
"type": "string"
},
"description": "Extra tags to attach to the call for filtering. Eg { \"userId\": \"123\", \"promptId\": \"populate-title\" }"
"description": "Extra tags to attach to the call for filtering. Eg { \"userId\": \"123\", \"promptId\": \"populate-title\" }",
"default": {}
}
},
"required": [
@@ -74,9 +75,9 @@
}
}
},
"/v1/report": {
"/report": {
"post": {
"operationId": "externalApi-report",
"operationId": "report",
"description": "Report an API call",
"security": [
{
@@ -117,7 +118,8 @@
"additionalProperties": {
"type": "string"
},
"description": "Extra tags to attach to the call for filtering. Eg { \"userId\": \"123\", \"promptId\": \"populate-title\" }"
"description": "Extra tags to attach to the call for filtering. Eg { \"userId\": \"123\", \"promptId\": \"populate-title\" }",
"default": {}
}
},
"required": [
@@ -144,6 +146,82 @@
}
}
}
},
"/local-testing-only-get-latest-logged-call": {
"get": {
"operationId": "localTestingOnlyGetLatestLoggedCall",
"description": "Get the latest logged call (only for local testing)",
"security": [
{
"Authorization": []
}
],
"parameters": [],
"responses": {
"200": {
"description": "Successful response",
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
"createdAt": {
"type": "string",
"format": "date-time"
},
"cacheHit": {
"type": "boolean"
},
"tags": {
"type": "object",
"additionalProperties": {
"type": "string",
"nullable": true
}
},
"modelResponse": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"statusCode": {
"type": "number",
"nullable": true
},
"errorMessage": {
"type": "string",
"nullable": true
},
"reqPayload": {},
"respPayload": {}
},
"required": [
"id",
"statusCode",
"errorMessage"
],
"additionalProperties": false,
"nullable": true
}
},
"required": [
"createdAt",
"cacheHit",
"tags",
"modelResponse"
],
"additionalProperties": false,
"nullable": true
}
}
}
},
"default": {
"$ref": "#/components/responses/error"
}
}
}
}
},
"components": {

View File

@@ -5,14 +5,14 @@ import httpx
from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.external_api_check_cache_json_body import ExternalApiCheckCacheJsonBody
from ...models.external_api_check_cache_response_200 import ExternalApiCheckCacheResponse200
from ...models.check_cache_json_body import CheckCacheJsonBody
from ...models.check_cache_response_200 import CheckCacheResponse200
from ...types import Response
def _get_kwargs(
*,
json_body: ExternalApiCheckCacheJsonBody,
json_body: CheckCacheJsonBody,
) -> Dict[str, Any]:
pass
@@ -20,16 +20,16 @@ def _get_kwargs(
return {
"method": "post",
"url": "/v1/check-cache",
"url": "/check-cache",
"json": json_json_body,
}
def _parse_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Optional[ExternalApiCheckCacheResponse200]:
) -> Optional[CheckCacheResponse200]:
if response.status_code == HTTPStatus.OK:
response_200 = ExternalApiCheckCacheResponse200.from_dict(response.json())
response_200 = CheckCacheResponse200.from_dict(response.json())
return response_200
if client.raise_on_unexpected_status:
@@ -40,7 +40,7 @@ def _parse_response(
def _build_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Response[ExternalApiCheckCacheResponse200]:
) -> Response[CheckCacheResponse200]:
return Response(
status_code=HTTPStatus(response.status_code),
content=response.content,
@@ -52,19 +52,19 @@ def _build_response(
def sync_detailed(
*,
client: AuthenticatedClient,
json_body: ExternalApiCheckCacheJsonBody,
) -> Response[ExternalApiCheckCacheResponse200]:
json_body: CheckCacheJsonBody,
) -> Response[CheckCacheResponse200]:
"""Check if a prompt is cached
Args:
json_body (ExternalApiCheckCacheJsonBody):
json_body (CheckCacheJsonBody):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[ExternalApiCheckCacheResponse200]
Response[CheckCacheResponse200]
"""
kwargs = _get_kwargs(
@@ -81,19 +81,19 @@ def sync_detailed(
def sync(
*,
client: AuthenticatedClient,
json_body: ExternalApiCheckCacheJsonBody,
) -> Optional[ExternalApiCheckCacheResponse200]:
json_body: CheckCacheJsonBody,
) -> Optional[CheckCacheResponse200]:
"""Check if a prompt is cached
Args:
json_body (ExternalApiCheckCacheJsonBody):
json_body (CheckCacheJsonBody):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
ExternalApiCheckCacheResponse200
CheckCacheResponse200
"""
return sync_detailed(
@@ -105,19 +105,19 @@ def sync(
async def asyncio_detailed(
*,
client: AuthenticatedClient,
json_body: ExternalApiCheckCacheJsonBody,
) -> Response[ExternalApiCheckCacheResponse200]:
json_body: CheckCacheJsonBody,
) -> Response[CheckCacheResponse200]:
"""Check if a prompt is cached
Args:
json_body (ExternalApiCheckCacheJsonBody):
json_body (CheckCacheJsonBody):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[ExternalApiCheckCacheResponse200]
Response[CheckCacheResponse200]
"""
kwargs = _get_kwargs(
@@ -132,19 +132,19 @@ async def asyncio_detailed(
async def asyncio(
*,
client: AuthenticatedClient,
json_body: ExternalApiCheckCacheJsonBody,
) -> Optional[ExternalApiCheckCacheResponse200]:
json_body: CheckCacheJsonBody,
) -> Optional[CheckCacheResponse200]:
"""Check if a prompt is cached
Args:
json_body (ExternalApiCheckCacheJsonBody):
json_body (CheckCacheJsonBody):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
ExternalApiCheckCacheResponse200
CheckCacheResponse200
"""
return (

View File

@@ -0,0 +1,133 @@
from http import HTTPStatus
from typing import Any, Dict, Optional, Union
import httpx
from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.local_testing_only_get_latest_logged_call_response_200 import (
LocalTestingOnlyGetLatestLoggedCallResponse200,
)
from ...types import Response
def _get_kwargs() -> Dict[str, Any]:
pass
return {
"method": "get",
"url": "/local-testing-only-get-latest-logged-call",
}
def _parse_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Optional[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]:
if response.status_code == HTTPStatus.OK:
_response_200 = response.json()
response_200: Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]
if _response_200 is None:
response_200 = None
else:
response_200 = LocalTestingOnlyGetLatestLoggedCallResponse200.from_dict(_response_200)
return response_200
if client.raise_on_unexpected_status:
raise errors.UnexpectedStatus(response.status_code, response.content)
else:
return None
def _build_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Response[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]:
return Response(
status_code=HTTPStatus(response.status_code),
content=response.content,
headers=response.headers,
parsed=_parse_response(client=client, response=response),
)
def sync_detailed(
*,
client: AuthenticatedClient,
) -> Response[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]:
"""Get the latest logged call (only for local testing)
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]
"""
kwargs = _get_kwargs()
response = client.get_httpx_client().request(
**kwargs,
)
return _build_response(client=client, response=response)
def sync(
*,
client: AuthenticatedClient,
) -> Optional[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]:
"""Get the latest logged call (only for local testing)
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]
"""
return sync_detailed(
client=client,
).parsed
async def asyncio_detailed(
*,
client: AuthenticatedClient,
) -> Response[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]:
"""Get the latest logged call (only for local testing)
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]
"""
kwargs = _get_kwargs()
response = await client.get_async_httpx_client().request(**kwargs)
return _build_response(client=client, response=response)
async def asyncio(
*,
client: AuthenticatedClient,
) -> Optional[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]:
"""Get the latest logged call (only for local testing)
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]
"""
return (
await asyncio_detailed(
client=client,
)
).parsed

View File

@@ -5,13 +5,13 @@ import httpx
from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.external_api_report_json_body import ExternalApiReportJsonBody
from ...models.report_json_body import ReportJsonBody
from ...types import Response
def _get_kwargs(
*,
json_body: ExternalApiReportJsonBody,
json_body: ReportJsonBody,
) -> Dict[str, Any]:
pass
@@ -19,7 +19,7 @@ def _get_kwargs(
return {
"method": "post",
"url": "/v1/report",
"url": "/report",
"json": json_json_body,
}
@@ -45,12 +45,12 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt
def sync_detailed(
*,
client: AuthenticatedClient,
json_body: ExternalApiReportJsonBody,
json_body: ReportJsonBody,
) -> Response[Any]:
"""Report an API call
Args:
json_body (ExternalApiReportJsonBody):
json_body (ReportJsonBody):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -74,12 +74,12 @@ def sync_detailed(
async def asyncio_detailed(
*,
client: AuthenticatedClient,
json_body: ExternalApiReportJsonBody,
json_body: ReportJsonBody,
) -> Response[Any]:
"""Report an API call
Args:
json_body (ExternalApiReportJsonBody):
json_body (ReportJsonBody):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.

View File

@@ -1,15 +1,25 @@
""" Contains all the data models used in inputs/outputs """
from .external_api_check_cache_json_body import ExternalApiCheckCacheJsonBody
from .external_api_check_cache_json_body_tags import ExternalApiCheckCacheJsonBodyTags
from .external_api_check_cache_response_200 import ExternalApiCheckCacheResponse200
from .external_api_report_json_body import ExternalApiReportJsonBody
from .external_api_report_json_body_tags import ExternalApiReportJsonBodyTags
from .check_cache_json_body import CheckCacheJsonBody
from .check_cache_json_body_tags import CheckCacheJsonBodyTags
from .check_cache_response_200 import CheckCacheResponse200
from .local_testing_only_get_latest_logged_call_response_200 import LocalTestingOnlyGetLatestLoggedCallResponse200
from .local_testing_only_get_latest_logged_call_response_200_model_response import (
LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse,
)
from .local_testing_only_get_latest_logged_call_response_200_tags import (
LocalTestingOnlyGetLatestLoggedCallResponse200Tags,
)
from .report_json_body import ReportJsonBody
from .report_json_body_tags import ReportJsonBodyTags
__all__ = (
"ExternalApiCheckCacheJsonBody",
"ExternalApiCheckCacheJsonBodyTags",
"ExternalApiCheckCacheResponse200",
"ExternalApiReportJsonBody",
"ExternalApiReportJsonBodyTags",
"CheckCacheJsonBody",
"CheckCacheJsonBodyTags",
"CheckCacheResponse200",
"LocalTestingOnlyGetLatestLoggedCallResponse200",
"LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse",
"LocalTestingOnlyGetLatestLoggedCallResponse200Tags",
"ReportJsonBody",
"ReportJsonBodyTags",
)

View File

@@ -5,25 +5,25 @@ from attrs import define
from ..types import UNSET, Unset
if TYPE_CHECKING:
from ..models.external_api_check_cache_json_body_tags import ExternalApiCheckCacheJsonBodyTags
from ..models.check_cache_json_body_tags import CheckCacheJsonBodyTags
T = TypeVar("T", bound="ExternalApiCheckCacheJsonBody")
T = TypeVar("T", bound="CheckCacheJsonBody")
@define
class ExternalApiCheckCacheJsonBody:
class CheckCacheJsonBody:
"""
Attributes:
requested_at (float): Unix timestamp in milliseconds
req_payload (Union[Unset, Any]): JSON-encoded request payload
tags (Union[Unset, ExternalApiCheckCacheJsonBodyTags]): Extra tags to attach to the call for filtering. Eg {
"userId": "123", "promptId": "populate-title" }
tags (Union[Unset, CheckCacheJsonBodyTags]): Extra tags to attach to the call for filtering. Eg { "userId":
"123", "promptId": "populate-title" }
"""
requested_at: float
req_payload: Union[Unset, Any] = UNSET
tags: Union[Unset, "ExternalApiCheckCacheJsonBodyTags"] = UNSET
tags: Union[Unset, "CheckCacheJsonBodyTags"] = UNSET
def to_dict(self) -> Dict[str, Any]:
requested_at = self.requested_at
@@ -47,7 +47,7 @@ class ExternalApiCheckCacheJsonBody:
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.external_api_check_cache_json_body_tags import ExternalApiCheckCacheJsonBodyTags
from ..models.check_cache_json_body_tags import CheckCacheJsonBodyTags
d = src_dict.copy()
requested_at = d.pop("requestedAt")
@@ -55,16 +55,16 @@ class ExternalApiCheckCacheJsonBody:
req_payload = d.pop("reqPayload", UNSET)
_tags = d.pop("tags", UNSET)
tags: Union[Unset, ExternalApiCheckCacheJsonBodyTags]
tags: Union[Unset, CheckCacheJsonBodyTags]
if isinstance(_tags, Unset):
tags = UNSET
else:
tags = ExternalApiCheckCacheJsonBodyTags.from_dict(_tags)
tags = CheckCacheJsonBodyTags.from_dict(_tags)
external_api_check_cache_json_body = cls(
check_cache_json_body = cls(
requested_at=requested_at,
req_payload=req_payload,
tags=tags,
)
return external_api_check_cache_json_body
return check_cache_json_body

View File

@@ -2,11 +2,11 @@ from typing import Any, Dict, List, Type, TypeVar
from attrs import define, field
T = TypeVar("T", bound="ExternalApiReportJsonBodyTags")
T = TypeVar("T", bound="CheckCacheJsonBodyTags")
@define
class ExternalApiReportJsonBodyTags:
class CheckCacheJsonBodyTags:
"""Extra tags to attach to the call for filtering. Eg { "userId": "123", "promptId": "populate-title" }"""
additional_properties: Dict[str, str] = field(init=False, factory=dict)
@@ -21,10 +21,10 @@ class ExternalApiReportJsonBodyTags:
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
external_api_report_json_body_tags = cls()
check_cache_json_body_tags = cls()
external_api_report_json_body_tags.additional_properties = d
return external_api_report_json_body_tags
check_cache_json_body_tags.additional_properties = d
return check_cache_json_body_tags
@property
def additional_keys(self) -> List[str]:

View File

@@ -4,11 +4,11 @@ from attrs import define
from ..types import UNSET, Unset
T = TypeVar("T", bound="ExternalApiCheckCacheResponse200")
T = TypeVar("T", bound="CheckCacheResponse200")
@define
class ExternalApiCheckCacheResponse200:
class CheckCacheResponse200:
"""
Attributes:
resp_payload (Union[Unset, Any]): JSON-encoded response payload
@@ -31,8 +31,8 @@ class ExternalApiCheckCacheResponse200:
d = src_dict.copy()
resp_payload = d.pop("respPayload", UNSET)
external_api_check_cache_response_200 = cls(
check_cache_response_200 = cls(
resp_payload=resp_payload,
)
return external_api_check_cache_response_200
return check_cache_response_200

View File

@@ -0,0 +1,84 @@
import datetime
from typing import TYPE_CHECKING, Any, Dict, Optional, Type, TypeVar
from attrs import define
from dateutil.parser import isoparse
if TYPE_CHECKING:
from ..models.local_testing_only_get_latest_logged_call_response_200_model_response import (
LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse,
)
from ..models.local_testing_only_get_latest_logged_call_response_200_tags import (
LocalTestingOnlyGetLatestLoggedCallResponse200Tags,
)
T = TypeVar("T", bound="LocalTestingOnlyGetLatestLoggedCallResponse200")
@define
class LocalTestingOnlyGetLatestLoggedCallResponse200:
"""
Attributes:
created_at (datetime.datetime):
cache_hit (bool):
tags (LocalTestingOnlyGetLatestLoggedCallResponse200Tags):
model_response (Optional[LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse]):
"""
created_at: datetime.datetime
cache_hit: bool
tags: "LocalTestingOnlyGetLatestLoggedCallResponse200Tags"
model_response: Optional["LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse"]
def to_dict(self) -> Dict[str, Any]:
created_at = self.created_at.isoformat()
cache_hit = self.cache_hit
tags = self.tags.to_dict()
model_response = self.model_response.to_dict() if self.model_response else None
field_dict: Dict[str, Any] = {}
field_dict.update(
{
"createdAt": created_at,
"cacheHit": cache_hit,
"tags": tags,
"modelResponse": model_response,
}
)
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.local_testing_only_get_latest_logged_call_response_200_model_response import (
LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse,
)
from ..models.local_testing_only_get_latest_logged_call_response_200_tags import (
LocalTestingOnlyGetLatestLoggedCallResponse200Tags,
)
d = src_dict.copy()
created_at = isoparse(d.pop("createdAt"))
cache_hit = d.pop("cacheHit")
tags = LocalTestingOnlyGetLatestLoggedCallResponse200Tags.from_dict(d.pop("tags"))
_model_response = d.pop("modelResponse")
model_response: Optional[LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse]
if _model_response is None:
model_response = None
else:
model_response = LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse.from_dict(_model_response)
local_testing_only_get_latest_logged_call_response_200 = cls(
created_at=created_at,
cache_hit=cache_hit,
tags=tags,
model_response=model_response,
)
return local_testing_only_get_latest_logged_call_response_200

View File

@@ -0,0 +1,70 @@
from typing import Any, Dict, Optional, Type, TypeVar, Union
from attrs import define
from ..types import UNSET, Unset
T = TypeVar("T", bound="LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse")
@define
class LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse:
"""
Attributes:
id (str):
status_code (Optional[float]):
error_message (Optional[str]):
req_payload (Union[Unset, Any]):
resp_payload (Union[Unset, Any]):
"""
id: str
status_code: Optional[float]
error_message: Optional[str]
req_payload: Union[Unset, Any] = UNSET
resp_payload: Union[Unset, Any] = UNSET
def to_dict(self) -> Dict[str, Any]:
id = self.id
status_code = self.status_code
error_message = self.error_message
req_payload = self.req_payload
resp_payload = self.resp_payload
field_dict: Dict[str, Any] = {}
field_dict.update(
{
"id": id,
"statusCode": status_code,
"errorMessage": error_message,
}
)
if req_payload is not UNSET:
field_dict["reqPayload"] = req_payload
if resp_payload is not UNSET:
field_dict["respPayload"] = resp_payload
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
id = d.pop("id")
status_code = d.pop("statusCode")
error_message = d.pop("errorMessage")
req_payload = d.pop("reqPayload", UNSET)
resp_payload = d.pop("respPayload", UNSET)
local_testing_only_get_latest_logged_call_response_200_model_response = cls(
id=id,
status_code=status_code,
error_message=error_message,
req_payload=req_payload,
resp_payload=resp_payload,
)
return local_testing_only_get_latest_logged_call_response_200_model_response

View File

@@ -0,0 +1,43 @@
from typing import Any, Dict, List, Optional, Type, TypeVar
from attrs import define, field
T = TypeVar("T", bound="LocalTestingOnlyGetLatestLoggedCallResponse200Tags")
@define
class LocalTestingOnlyGetLatestLoggedCallResponse200Tags:
""" """
additional_properties: Dict[str, Optional[str]] = field(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
local_testing_only_get_latest_logged_call_response_200_tags = cls()
local_testing_only_get_latest_logged_call_response_200_tags.additional_properties = d
return local_testing_only_get_latest_logged_call_response_200_tags
@property
def additional_keys(self) -> List[str]:
return list(self.additional_properties.keys())
def __getitem__(self, key: str) -> Optional[str]:
return self.additional_properties[key]
def __setitem__(self, key: str, value: Optional[str]) -> None:
self.additional_properties[key] = value
def __delitem__(self, key: str) -> None:
del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties

View File

@@ -5,14 +5,14 @@ from attrs import define
from ..types import UNSET, Unset
if TYPE_CHECKING:
from ..models.external_api_report_json_body_tags import ExternalApiReportJsonBodyTags
from ..models.report_json_body_tags import ReportJsonBodyTags
T = TypeVar("T", bound="ExternalApiReportJsonBody")
T = TypeVar("T", bound="ReportJsonBody")
@define
class ExternalApiReportJsonBody:
class ReportJsonBody:
"""
Attributes:
requested_at (float): Unix timestamp in milliseconds
@@ -21,8 +21,8 @@ class ExternalApiReportJsonBody:
resp_payload (Union[Unset, Any]): JSON-encoded response payload
status_code (Union[Unset, float]): HTTP status code of response
error_message (Union[Unset, str]): User-friendly error message
tags (Union[Unset, ExternalApiReportJsonBodyTags]): Extra tags to attach to the call for filtering. Eg {
"userId": "123", "promptId": "populate-title" }
tags (Union[Unset, ReportJsonBodyTags]): Extra tags to attach to the call for filtering. Eg { "userId": "123",
"promptId": "populate-title" }
"""
requested_at: float
@@ -31,7 +31,7 @@ class ExternalApiReportJsonBody:
resp_payload: Union[Unset, Any] = UNSET
status_code: Union[Unset, float] = UNSET
error_message: Union[Unset, str] = UNSET
tags: Union[Unset, "ExternalApiReportJsonBodyTags"] = UNSET
tags: Union[Unset, "ReportJsonBodyTags"] = UNSET
def to_dict(self) -> Dict[str, Any]:
requested_at = self.requested_at
@@ -66,7 +66,7 @@ class ExternalApiReportJsonBody:
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.external_api_report_json_body_tags import ExternalApiReportJsonBodyTags
from ..models.report_json_body_tags import ReportJsonBodyTags
d = src_dict.copy()
requested_at = d.pop("requestedAt")
@@ -82,13 +82,13 @@ class ExternalApiReportJsonBody:
error_message = d.pop("errorMessage", UNSET)
_tags = d.pop("tags", UNSET)
tags: Union[Unset, ExternalApiReportJsonBodyTags]
tags: Union[Unset, ReportJsonBodyTags]
if isinstance(_tags, Unset):
tags = UNSET
else:
tags = ExternalApiReportJsonBodyTags.from_dict(_tags)
tags = ReportJsonBodyTags.from_dict(_tags)
external_api_report_json_body = cls(
report_json_body = cls(
requested_at=requested_at,
received_at=received_at,
req_payload=req_payload,
@@ -98,4 +98,4 @@ class ExternalApiReportJsonBody:
tags=tags,
)
return external_api_report_json_body
return report_json_body

View File

@@ -2,11 +2,11 @@ from typing import Any, Dict, List, Type, TypeVar
from attrs import define, field
T = TypeVar("T", bound="ExternalApiCheckCacheJsonBodyTags")
T = TypeVar("T", bound="ReportJsonBodyTags")
@define
class ExternalApiCheckCacheJsonBodyTags:
class ReportJsonBodyTags:
"""Extra tags to attach to the call for filtering. Eg { "userId": "123", "promptId": "populate-title" }"""
additional_properties: Dict[str, str] = field(init=False, factory=dict)
@@ -21,10 +21,10 @@ class ExternalApiCheckCacheJsonBodyTags:
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
external_api_check_cache_json_body_tags = cls()
report_json_body_tags = cls()
external_api_check_cache_json_body_tags.additional_properties = d
return external_api_check_cache_json_body_tags
report_json_body_tags.additional_properties = d
return report_json_body_tags
@property
def additional_keys(self) -> List[str]:

View File

@@ -1,9 +1,9 @@
from typing import Any, Optional
def merge_streamed_chunks(base: Optional[Any], chunk: Any) -> Any:
def merge_openai_chunks(base: Optional[Any], chunk: Any) -> Any:
if base is None:
return merge_streamed_chunks({**chunk, "choices": []}, chunk)
return merge_openai_chunks({**chunk, "choices": []}, chunk)
choices = base["choices"].copy()
for choice in chunk["choices"]:
@@ -34,9 +34,7 @@ def merge_streamed_chunks(base: Optional[Any], chunk: Any) -> Any:
{**new_choice, "message": {"role": "assistant", **choice["delta"]}}
)
merged = {
return {
**base,
"choices": choices,
}
return merged
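
The renamed merge_openai_chunks folds streamed deltas into a complete response object; a short usage sketch mirroring the reduce(...) pattern in the test changes below (assumes a configured openpipe client and OPENAI_API_KEY in the environment):

from functools import reduce

from openpipe import openai
from openpipe.merge_openai_chunks import merge_openai_chunks

stream = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "system", "content": "count to 4"}],
    stream=True,
)

# Each chunk only carries a delta; folding them together yields a normal message shape.
merged = reduce(merge_openai_chunks, stream, None)
print(merged["choices"][0]["message"]["content"])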

View File

@@ -3,9 +3,16 @@ from openai.openai_object import OpenAIObject
import time
import inspect
from openpipe.merge_openai_chunks import merge_streamed_chunks
from openpipe.merge_openai_chunks import merge_openai_chunks
from openpipe.openpipe_meta import OpenPipeMeta
from .shared import maybe_check_cache, maybe_check_cache_async, report_async, report
from .shared import (
_should_check_cache,
maybe_check_cache,
maybe_check_cache_async,
report_async,
report,
)
class WrappedChatCompletion(original_openai.ChatCompletion):
@@ -29,9 +36,15 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
def _gen():
assembled_completion = None
for chunk in chat_completion:
assembled_completion = merge_streamed_chunks(
assembled_completion = merge_openai_chunks(
assembled_completion, chunk
)
cache_status = (
"MISS" if _should_check_cache(openpipe_options) else "SKIP"
)
chunk.openpipe = OpenPipeMeta(cache_status=cache_status)
yield chunk
received_at = int(time.time() * 1000)
@@ -58,6 +71,10 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
status_code=200,
)
cache_status = (
"MISS" if _should_check_cache(openpipe_options) else "SKIP"
)
chat_completion["openpipe"] = OpenPipeMeta(cache_status=cache_status)
return chat_completion
except Exception as e:
received_at = int(time.time() * 1000)
@@ -96,21 +113,28 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
requested_at = int(time.time() * 1000)
try:
chat_completion = original_openai.ChatCompletion.acreate(*args, **kwargs)
chat_completion = await original_openai.ChatCompletion.acreate(
*args, **kwargs
)
if inspect.isgenerator(chat_completion):
if inspect.isasyncgen(chat_completion):
def _gen():
async def _gen():
assembled_completion = None
for chunk in chat_completion:
assembled_completion = merge_streamed_chunks(
async for chunk in chat_completion:
assembled_completion = merge_openai_chunks(
assembled_completion, chunk
)
cache_status = (
"MISS" if _should_check_cache(openpipe_options) else "SKIP"
)
chunk.openpipe = OpenPipeMeta(cache_status=cache_status)
yield chunk
received_at = int(time.time() * 1000)
report_async(
await report_async(
openpipe_options=openpipe_options,
requested_at=requested_at,
received_at=received_at,
@@ -123,7 +147,7 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
else:
received_at = int(time.time() * 1000)
report_async(
await report_async(
openpipe_options=openpipe_options,
requested_at=requested_at,
received_at=received_at,
@@ -132,12 +156,17 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
status_code=200,
)
cache_status = (
"MISS" if _should_check_cache(openpipe_options) else "SKIP"
)
chat_completion["openpipe"] = OpenPipeMeta(cache_status=cache_status)
return chat_completion
except Exception as e:
received_at = int(time.time() * 1000)
if isinstance(e, original_openai.OpenAIError):
report_async(
await report_async(
openpipe_options=openpipe_options,
requested_at=requested_at,
received_at=received_at,
@@ -147,7 +176,7 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
status_code=e.http_status,
)
else:
report_async(
await report_async(
openpipe_options=openpipe_options,
requested_at=requested_at,
received_at=received_at,
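
The wrapped ChatCompletion now attaches an openpipe meta object to every completion; a sketch of the cache flow it exposes, mirroring test_caching below (cache_status is one of HIT, MISS, or SKIP):

from openpipe import openai

completion = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "system", "content": "repeat 'hello'"}],
    openpipe={"cache": True},
)
# The first uncached request with caching enabled reports a MISS; calls made without
# the cache option report SKIP, and a repeated identical request should be a HIT.
print(completion.openpipe.cache_status)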

View File

@@ -0,0 +1,7 @@
from attr import dataclass
@dataclass
class OpenPipeMeta:
# Cache status. One of 'HIT', 'MISS', 'SKIP'
cache_status: str

View File

@@ -1,10 +1,10 @@
from openpipe.api_client.api.default import (
external_api_report,
external_api_check_cache,
report as api_report,
check_cache,
)
from openpipe.api_client.client import AuthenticatedClient
from openpipe.api_client.models.external_api_report_json_body_tags import (
ExternalApiReportJsonBodyTags,
from openpipe.api_client.models.report_json_body_tags import (
ReportJsonBodyTags,
)
import toml
import time
@@ -21,7 +21,7 @@ def _get_tags(openpipe_options):
tags["$sdk"] = "python"
tags["$sdk_version"] = version
return ExternalApiReportJsonBodyTags.from_dict(tags)
return ReportJsonBodyTags.from_dict(tags)
def _should_check_cache(openpipe_options):
@@ -31,7 +31,7 @@ def _should_check_cache(openpipe_options):
def _process_cache_payload(
payload: external_api_check_cache.ExternalApiCheckCacheResponse200,
payload: check_cache.CheckCacheResponse200,
):
if not payload or not payload.resp_payload:
return None
@@ -47,9 +47,9 @@ def maybe_check_cache(
if not _should_check_cache(openpipe_options):
return None
try:
payload = external_api_check_cache.sync(
payload = check_cache.sync(
client=configured_client,
json_body=external_api_check_cache.ExternalApiCheckCacheJsonBody(
json_body=check_cache.CheckCacheJsonBody(
req_payload=req_payload,
requested_at=int(time.time() * 1000),
tags=_get_tags(openpipe_options),
@@ -72,9 +72,9 @@ async def maybe_check_cache_async(
return None
try:
payload = await external_api_check_cache.asyncio(
payload = await check_cache.asyncio(
client=configured_client,
json_body=external_api_check_cache.ExternalApiCheckCacheJsonBody(
json_body=check_cache.CheckCacheJsonBody(
req_payload=req_payload,
requested_at=int(time.time() * 1000),
tags=_get_tags(openpipe_options),
@@ -94,9 +94,9 @@ def report(
**kwargs,
):
try:
external_api_report.sync_detailed(
api_report.sync_detailed(
client=configured_client,
json_body=external_api_report.ExternalApiReportJsonBody(
json_body=api_report.ReportJsonBody(
**kwargs,
tags=_get_tags(openpipe_options),
),
@@ -112,9 +112,9 @@ async def report_async(
**kwargs,
):
try:
await external_api_report.asyncio_detailed(
await api_report.asyncio_detailed(
client=configured_client,
json_body=external_api_report.ExternalApiReportJsonBody(
json_body=api_report.ReportJsonBody(
**kwargs,
tags=_get_tags(openpipe_options),
),

View File

@@ -1,55 +1,106 @@
from functools import reduce
from dotenv import load_dotenv
from . import openai, configure_openpipe
import os
import pytest
from . import openai, configure_openpipe, configured_client
from .api_client.api.default import local_testing_only_get_latest_logged_call
from .merge_openai_chunks import merge_openai_chunks
import random
import string
def random_string(length):
letters = string.ascii_lowercase
return "".join(random.choice(letters) for i in range(length))
load_dotenv()
openai.api_key = os.getenv("OPENAI_API_KEY")
configure_openpipe(
base_url="http://localhost:3000/api", api_key=os.getenv("OPENPIPE_API_KEY")
base_url="http://localhost:3000/api/v1", api_key=os.getenv("OPENPIPE_API_KEY")
)
def last_logged_call():
return local_testing_only_get_latest_logged_call.sync(client=configured_client)
def test_sync():
completion = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[{"role": "system", "content": "count to 10"}],
messages=[{"role": "system", "content": "count to 3"}],
)
print(completion.choices[0].message.content)
last_logged = last_logged_call()
assert (
last_logged.model_response.resp_payload["choices"][0]["message"]["content"]
== completion.choices[0].message.content
)
assert (
last_logged.model_response.req_payload["messages"][0]["content"] == "count to 3"
)
assert completion.openpipe.cache_status == "SKIP"
def test_streaming():
completion = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[{"role": "system", "content": "count to 10"}],
messages=[{"role": "system", "content": "count to 4"}],
stream=True,
)
for chunk in completion:
print(chunk)
merged = reduce(merge_openai_chunks, completion, None)
last_logged = last_logged_call()
assert (
last_logged.model_response.resp_payload["choices"][0]["message"]["content"]
== merged["choices"][0]["message"]["content"]
)
async def test_async():
acompletion = await openai.ChatCompletion.acreate(
completion = await openai.ChatCompletion.acreate(
model="gpt-3.5-turbo",
messages=[{"role": "user", "content": "count down from 5"}],
)
last_logged = last_logged_call()
assert (
last_logged.model_response.resp_payload["choices"][0]["message"]["content"]
== completion.choices[0].message.content
)
assert (
last_logged.model_response.req_payload["messages"][0]["content"]
== "count down from 5"
)
print(acompletion.choices[0].message.content)
assert completion.openpipe.cache_status == "SKIP"
async def test_async_streaming():
acompletion = await openai.ChatCompletion.acreate(
completion = await openai.ChatCompletion.acreate(
model="gpt-3.5-turbo",
messages=[{"role": "user", "content": "count down from 5"}],
stream=True,
)
async for chunk in acompletion:
print(chunk)
merged = None
async for chunk in completion:
assert chunk.openpipe.cache_status == "SKIP"
merged = merge_openai_chunks(merged, chunk)
last_logged = last_logged_call()
assert (
last_logged.model_response.resp_payload["choices"][0]["message"]["content"]
== merged["choices"][0]["message"]["content"]
)
assert (
last_logged.model_response.req_payload["messages"][0]["content"]
== "count down from 5"
)
assert merged["openpipe"].cache_status == "SKIP"
def test_sync_with_tags():
@@ -58,31 +109,54 @@ def test_sync_with_tags():
messages=[{"role": "system", "content": "count to 10"}],
openpipe={"tags": {"promptId": "testprompt"}},
)
print("finished")
print(completion.choices[0].message.content)
last_logged = last_logged_call()
assert (
last_logged.model_response.resp_payload["choices"][0]["message"]["content"]
== completion.choices[0].message.content
)
print(last_logged.tags)
assert last_logged.tags["promptId"] == "testprompt"
assert last_logged.tags["$sdk"] == "python"
def test_bad_call():
completion = openai.ChatCompletion.create(
model="gpt-3.5-turbo-blaster",
messages=[{"role": "system", "content": "count to 10"}],
stream=True,
try:
completion = openai.ChatCompletion.create(
model="gpt-3.5-turbo-blaster",
messages=[{"role": "system", "content": "count to 10"}],
stream=True,
)
assert False
except Exception as e:
pass
last_logged = last_logged_call()
print(last_logged)
assert (
last_logged.model_response.error_message
== "The model `gpt-3.5-turbo-blaster` does not exist"
)
assert last_logged.model_response.status_code == 404
@pytest.mark.focus
async def test_caching():
messages = [{"role": "system", "content": f"repeat '{random_string(10)}'"}]
completion = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[{"role": "system", "content": "count to 10"}],
messages=messages,
openpipe={"cache": True},
)
assert completion.openpipe.cache_status == "MISS"
first_logged = last_logged_call()
assert (
completion.choices[0].message.content
== first_logged.model_response.resp_payload["choices"][0]["message"]["content"]
)
completion2 = await openai.ChatCompletion.acreate(
model="gpt-3.5-turbo",
messages=[{"role": "system", "content": "count to 10"}],
openpipe={"cache": True},
)
print(completion2)
assert completion2.openpipe.cache_status == "HIT"
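
The streaming tests above rely on merge_openai_chunks to fold the stream of deltas back into a single completion-shaped dict. A minimal sketch of that pattern outside the test suite (import paths assumed from the test file's relative imports):

from functools import reduce
from openpipe import openai  # assumed public import path for the wrapped module
from openpipe.merge_openai_chunks import merge_openai_chunks  # assumed absolute module path

stream = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "system", "content": "count to 4"}],
    stream=True,
)
# Fold the streamed chunks into one completion dict, as test_streaming does.
merged = reduce(merge_openai_chunks, stream, None)
print(merged["choices"][0]["message"]["content"])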

View File

@@ -4,7 +4,7 @@
* OpenPipe API
* The public API for reporting API calls to OpenPipe
*
* The version of the OpenAPI document: 0.1.0
* The version of the OpenAPI document: 0.1.1
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
@@ -26,125 +26,193 @@ import { BASE_PATH, COLLECTION_FORMATS, BaseAPI, RequiredError } from './base';
/**
*
* @export
* @interface ExternalApiCheckCache200Response
* @interface CheckCache200Response
*/
export interface ExternalApiCheckCache200Response {
export interface CheckCache200Response {
/**
* JSON-encoded response payload
* @type {any}
* @memberof ExternalApiCheckCache200Response
* @memberof CheckCache200Response
*/
'respPayload'?: any;
}
/**
*
* @export
* @interface ExternalApiCheckCacheDefaultResponse
* @interface CheckCacheDefaultResponse
*/
export interface ExternalApiCheckCacheDefaultResponse {
export interface CheckCacheDefaultResponse {
/**
*
* @type {string}
* @memberof ExternalApiCheckCacheDefaultResponse
* @memberof CheckCacheDefaultResponse
*/
'message': string;
/**
*
* @type {string}
* @memberof ExternalApiCheckCacheDefaultResponse
* @memberof CheckCacheDefaultResponse
*/
'code': string;
/**
*
* @type {Array<ExternalApiCheckCacheDefaultResponseIssuesInner>}
* @memberof ExternalApiCheckCacheDefaultResponse
* @type {Array<CheckCacheDefaultResponseIssuesInner>}
* @memberof CheckCacheDefaultResponse
*/
'issues'?: Array<ExternalApiCheckCacheDefaultResponseIssuesInner>;
'issues'?: Array<CheckCacheDefaultResponseIssuesInner>;
}
/**
*
* @export
* @interface ExternalApiCheckCacheDefaultResponseIssuesInner
* @interface CheckCacheDefaultResponseIssuesInner
*/
export interface ExternalApiCheckCacheDefaultResponseIssuesInner {
export interface CheckCacheDefaultResponseIssuesInner {
/**
*
* @type {string}
* @memberof ExternalApiCheckCacheDefaultResponseIssuesInner
* @memberof CheckCacheDefaultResponseIssuesInner
*/
'message': string;
}
/**
*
* @export
* @interface ExternalApiCheckCacheRequest
* @interface CheckCacheRequest
*/
export interface ExternalApiCheckCacheRequest {
export interface CheckCacheRequest {
/**
* Unix timestamp in milliseconds
* @type {number}
* @memberof ExternalApiCheckCacheRequest
* @memberof CheckCacheRequest
*/
'requestedAt': number;
/**
* JSON-encoded request payload
* @type {any}
* @memberof ExternalApiCheckCacheRequest
* @memberof CheckCacheRequest
*/
'reqPayload'?: any;
/**
* Extra tags to attach to the call for filtering. Eg { \"userId\": \"123\", \"promptId\": \"populate-title\" }
* @type {{ [key: string]: string; }}
* @memberof ExternalApiCheckCacheRequest
* @memberof CheckCacheRequest
*/
'tags'?: { [key: string]: string; };
}
/**
*
* @export
* @interface ExternalApiReportRequest
* @interface LocalTestingOnlyGetLatestLoggedCall200Response
*/
export interface ExternalApiReportRequest {
export interface LocalTestingOnlyGetLatestLoggedCall200Response {
/**
*
* @type {string}
* @memberof LocalTestingOnlyGetLatestLoggedCall200Response
*/
'createdAt': string;
/**
*
* @type {boolean}
* @memberof LocalTestingOnlyGetLatestLoggedCall200Response
*/
'cacheHit': boolean;
/**
*
* @type {{ [key: string]: string; }}
* @memberof LocalTestingOnlyGetLatestLoggedCall200Response
*/
'tags': { [key: string]: string; };
/**
*
* @type {LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse}
* @memberof LocalTestingOnlyGetLatestLoggedCall200Response
*/
'modelResponse': LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse | null;
}
/**
*
* @export
* @interface LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse
*/
export interface LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse {
/**
*
* @type {string}
* @memberof LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse
*/
'id': string;
/**
*
* @type {number}
* @memberof LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse
*/
'statusCode': number | null;
/**
*
* @type {string}
* @memberof LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse
*/
'errorMessage': string | null;
/**
*
* @type {any}
* @memberof LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse
*/
'reqPayload'?: any;
/**
*
* @type {any}
* @memberof LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse
*/
'respPayload'?: any;
}
/**
*
* @export
* @interface ReportRequest
*/
export interface ReportRequest {
/**
* Unix timestamp in milliseconds
* @type {number}
* @memberof ExternalApiReportRequest
* @memberof ReportRequest
*/
'requestedAt': number;
/**
* Unix timestamp in milliseconds
* @type {number}
* @memberof ExternalApiReportRequest
* @memberof ReportRequest
*/
'receivedAt': number;
/**
* JSON-encoded request payload
* @type {any}
* @memberof ExternalApiReportRequest
* @memberof ReportRequest
*/
'reqPayload'?: any;
/**
* JSON-encoded response payload
* @type {any}
* @memberof ExternalApiReportRequest
* @memberof ReportRequest
*/
'respPayload'?: any;
/**
* HTTP status code of response
* @type {number}
* @memberof ExternalApiReportRequest
* @memberof ReportRequest
*/
'statusCode'?: number;
/**
* User-friendly error message
* @type {string}
* @memberof ExternalApiReportRequest
* @memberof ReportRequest
*/
'errorMessage'?: string;
/**
* Extra tags to attach to the call for filtering. Eg { \"userId\": \"123\", \"promptId\": \"populate-title\" }
* @type {{ [key: string]: string; }}
* @memberof ExternalApiReportRequest
* @memberof ReportRequest
*/
'tags'?: { [key: string]: string; };
}
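
The ReportRequest shape above is what the Python wrapper's report helper ultimately sends. A minimal sketch of an equivalent body as a plain dict (all values illustrative):

import time

now_ms = int(time.time() * 1000)
report_body = {
    "requestedAt": now_ms - 1200,  # Unix timestamp in milliseconds
    "receivedAt": now_ms,          # Unix timestamp in milliseconds
    "reqPayload": {"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "count to 3"}]},
    "respPayload": {"choices": [{"message": {"role": "assistant", "content": "1, 2, 3"}}]},
    "statusCode": 200,
    "tags": {"userId": "123", "promptId": "populate-title"},
}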
@@ -157,14 +225,14 @@ export const DefaultApiAxiosParamCreator = function (configuration?: Configurati
return {
/**
* Check if a prompt is cached
* @param {ExternalApiCheckCacheRequest} externalApiCheckCacheRequest
* @param {CheckCacheRequest} checkCacheRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
externalApiCheckCache: async (externalApiCheckCacheRequest: ExternalApiCheckCacheRequest, options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'externalApiCheckCacheRequest' is not null or undefined
assertParamExists('externalApiCheckCache', 'externalApiCheckCacheRequest', externalApiCheckCacheRequest)
const localVarPath = `/v1/check-cache`;
checkCache: async (checkCacheRequest: CheckCacheRequest, options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'checkCacheRequest' is not null or undefined
assertParamExists('checkCache', 'checkCacheRequest', checkCacheRequest)
const localVarPath = `/check-cache`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
@@ -187,7 +255,40 @@ export const DefaultApiAxiosParamCreator = function (configuration?: Configurati
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
localVarRequestOptions.data = serializeDataIfNeeded(externalApiCheckCacheRequest, localVarRequestOptions, configuration)
localVarRequestOptions.data = serializeDataIfNeeded(checkCacheRequest, localVarRequestOptions, configuration)
return {
url: toPathString(localVarUrlObj),
options: localVarRequestOptions,
};
},
/**
* Get the latest logged call (only for local testing)
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
localTestingOnlyGetLatestLoggedCall: async (options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
const localVarPath = `/local-testing-only-get-latest-logged-call`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
if (configuration) {
baseOptions = configuration.baseOptions;
}
const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
const localVarHeaderParameter = {} as any;
const localVarQueryParameter = {} as any;
// authentication Authorization required
// http bearer authentication required
await setBearerAuthToObject(localVarHeaderParameter, configuration)
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
return {
url: toPathString(localVarUrlObj),
@@ -196,14 +297,14 @@ export const DefaultApiAxiosParamCreator = function (configuration?: Configurati
},
/**
* Report an API call
* @param {ExternalApiReportRequest} externalApiReportRequest
* @param {ReportRequest} reportRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
externalApiReport: async (externalApiReportRequest: ExternalApiReportRequest, options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'externalApiReportRequest' is not null or undefined
assertParamExists('externalApiReport', 'externalApiReportRequest', externalApiReportRequest)
const localVarPath = `/v1/report`;
report: async (reportRequest: ReportRequest, options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'reportRequest' is not null or undefined
assertParamExists('report', 'reportRequest', reportRequest)
const localVarPath = `/report`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
@@ -226,7 +327,7 @@ export const DefaultApiAxiosParamCreator = function (configuration?: Configurati
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
localVarRequestOptions.data = serializeDataIfNeeded(externalApiReportRequest, localVarRequestOptions, configuration)
localVarRequestOptions.data = serializeDataIfNeeded(reportRequest, localVarRequestOptions, configuration)
return {
url: toPathString(localVarUrlObj),
@@ -245,22 +346,31 @@ export const DefaultApiFp = function(configuration?: Configuration) {
return {
/**
* Check if a prompt is cached
* @param {ExternalApiCheckCacheRequest} externalApiCheckCacheRequest
* @param {CheckCacheRequest} checkCacheRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async externalApiCheckCache(externalApiCheckCacheRequest: ExternalApiCheckCacheRequest, options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<ExternalApiCheckCache200Response>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.externalApiCheckCache(externalApiCheckCacheRequest, options);
async checkCache(checkCacheRequest: CheckCacheRequest, options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<CheckCache200Response>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.checkCache(checkCacheRequest, options);
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
},
/**
* Get the latest logged call (only for local testing)
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async localTestingOnlyGetLatestLoggedCall(options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<LocalTestingOnlyGetLatestLoggedCall200Response>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.localTestingOnlyGetLatestLoggedCall(options);
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
},
/**
* Report an API call
* @param {ExternalApiReportRequest} externalApiReportRequest
* @param {ReportRequest} reportRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async externalApiReport(externalApiReportRequest: ExternalApiReportRequest, options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.externalApiReport(externalApiReportRequest, options);
async report(reportRequest: ReportRequest, options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.report(reportRequest, options);
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
},
}
@@ -275,21 +385,29 @@ export const DefaultApiFactory = function (configuration?: Configuration, basePa
return {
/**
* Check if a prompt is cached
* @param {ExternalApiCheckCacheRequest} externalApiCheckCacheRequest
* @param {CheckCacheRequest} checkCacheRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
externalApiCheckCache(externalApiCheckCacheRequest: ExternalApiCheckCacheRequest, options?: any): AxiosPromise<ExternalApiCheckCache200Response> {
return localVarFp.externalApiCheckCache(externalApiCheckCacheRequest, options).then((request) => request(axios, basePath));
checkCache(checkCacheRequest: CheckCacheRequest, options?: any): AxiosPromise<CheckCache200Response> {
return localVarFp.checkCache(checkCacheRequest, options).then((request) => request(axios, basePath));
},
/**
* Get the latest logged call (only for local testing)
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
localTestingOnlyGetLatestLoggedCall(options?: any): AxiosPromise<LocalTestingOnlyGetLatestLoggedCall200Response> {
return localVarFp.localTestingOnlyGetLatestLoggedCall(options).then((request) => request(axios, basePath));
},
/**
* Report an API call
* @param {ExternalApiReportRequest} externalApiReportRequest
* @param {ReportRequest} reportRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
externalApiReport(externalApiReportRequest: ExternalApiReportRequest, options?: any): AxiosPromise<any> {
return localVarFp.externalApiReport(externalApiReportRequest, options).then((request) => request(axios, basePath));
report(reportRequest: ReportRequest, options?: any): AxiosPromise<any> {
return localVarFp.report(reportRequest, options).then((request) => request(axios, basePath));
},
};
};
@@ -303,24 +421,34 @@ export const DefaultApiFactory = function (configuration?: Configuration, basePa
export class DefaultApi extends BaseAPI {
/**
* Check if a prompt is cached
* @param {ExternalApiCheckCacheRequest} externalApiCheckCacheRequest
* @param {CheckCacheRequest} checkCacheRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof DefaultApi
*/
public externalApiCheckCache(externalApiCheckCacheRequest: ExternalApiCheckCacheRequest, options?: AxiosRequestConfig) {
return DefaultApiFp(this.configuration).externalApiCheckCache(externalApiCheckCacheRequest, options).then((request) => request(this.axios, this.basePath));
public checkCache(checkCacheRequest: CheckCacheRequest, options?: AxiosRequestConfig) {
return DefaultApiFp(this.configuration).checkCache(checkCacheRequest, options).then((request) => request(this.axios, this.basePath));
}
/**
* Get the latest logged call (only for local testing)
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof DefaultApi
*/
public localTestingOnlyGetLatestLoggedCall(options?: AxiosRequestConfig) {
return DefaultApiFp(this.configuration).localTestingOnlyGetLatestLoggedCall(options).then((request) => request(this.axios, this.basePath));
}
/**
* Report an API call
* @param {ExternalApiReportRequest} externalApiReportRequest
* @param {ReportRequest} reportRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof DefaultApi
*/
public externalApiReport(externalApiReportRequest: ExternalApiReportRequest, options?: AxiosRequestConfig) {
return DefaultApiFp(this.configuration).externalApiReport(externalApiReportRequest, options).then((request) => request(this.axios, this.basePath));
public report(reportRequest: ReportRequest, options?: AxiosRequestConfig) {
return DefaultApiFp(this.configuration).report(reportRequest, options).then((request) => request(this.axios, this.basePath));
}
}
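
The localTestingOnlyGetLatestLoggedCall operation added here backs the Python tests' last_logged_call helper. A minimal Python-side sketch, reusing the generated names from the test imports and assuming configured_client is an AuthenticatedClient pointed at the /api/v1 base URL:

from openpipe import configured_client  # assumed public import path, as in the tests
from openpipe.api_client.api.default import local_testing_only_get_latest_logged_call

last_logged = local_testing_only_get_latest_logged_call.sync(client=configured_client)
print(last_logged.tags)
print(last_logged.model_response.resp_payload["choices"][0]["message"]["content"])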

View File

@@ -4,7 +4,7 @@
* OpenPipe API
* The public API for reporting API calls to OpenPipe
*
* The version of the OpenAPI document: 0.1.0
* The version of the OpenAPI document: 0.1.1
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
@@ -19,7 +19,7 @@ import type { Configuration } from './configuration';
import type { AxiosPromise, AxiosInstance, AxiosRequestConfig } from 'axios';
import globalAxios from 'axios';
export const BASE_PATH = "https://app.openpipe.ai/api".replace(/\/+$/, "");
export const BASE_PATH = "https://app.openpipe.ai/api/v1".replace(/\/+$/, "");
/**
*
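
With the versioned default BASE_PATH above, the Python client is configured against the same /api/v1 path. A minimal sketch (the hosted URL is the generated default; the environment variable name follows the test setup, and localhost:3000/api/v1 remains the local-development target):

import os
from openpipe import configure_openpipe  # assumed public import path

configure_openpipe(
    base_url="https://app.openpipe.ai/api/v1",  # or "http://localhost:3000/api/v1" locally
    api_key=os.getenv("OPENPIPE_API_KEY"),
)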

View File

@@ -4,7 +4,7 @@
* OpenPipe API
* The public API for reporting API calls to OpenPipe
*
* The version of the OpenAPI document: 0.1.0
* The version of the OpenAPI document: 0.1.1
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -4,7 +4,7 @@
* OpenPipe API
* The public API for reporting API calls to OpenPipe
*
* The version of the OpenAPI document: 0.1.0
* The version of the OpenAPI document: 0.1.1
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -4,7 +4,7 @@
* OpenPipe API
* The public API for reporting API calls to OpenPipe
*
* The version of the OpenAPI document: 0.1.0
* The version of the OpenAPI document: 0.1.1
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -7,9 +7,8 @@ databases:
services:
- type: web
name: querykey-prod-web
rootDir: app
env: docker
dockerfilePath: Dockerfile
dockerfilePath: ./app/Dockerfile
dockerContext: .
plan: standard
domains:
@@ -32,9 +31,8 @@ services:
- type: web
name: querykey-prod-wss
rootDir: app
env: docker
dockerfilePath: Dockerfile
dockerfilePath: ./app/Dockerfile
dockerContext: .
plan: free
dockerCommand: pnpm tsx src/wss-server.ts