Compare commits

..

1 Commit

Author SHA1 Message Date
David Corbitt 5e56c93c3f Remove scenarios header from output table card 2023-08-13 01:40:03 -07:00
45 changed files with 328 additions and 1143 deletions

View File

@@ -1,5 +0,0 @@
**/node_modules/
.git
**/.venv/
**/.env*
**/.next/

View File

@@ -32,5 +32,5 @@ NEXT_PUBLIC_HOST="http://localhost:3000"
GITHUB_CLIENT_ID="your_client_id"
GITHUB_CLIENT_SECRET="your_secret"
OPENPIPE_BASE_URL="http://localhost:3000/api/v1"
OPENPIPE_BASE_URL="http://localhost:3000/api"
OPENPIPE_API_KEY="your_key"

View File

@@ -12,11 +12,12 @@ declare module "nextjs-routes" {
export type Route =
| StaticRoute<"/account/signin">
| DynamicRoute<"/api/[...trpc]", { "trpc": string[] }>
| DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
| StaticRoute<"/api/experiments/og-image">
| StaticRoute<"/api/openapi">
| StaticRoute<"/api/sentry-example-api">
| DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>
| DynamicRoute<"/api/v1/[...trpc]", { "trpc": string[] }>
| StaticRoute<"/api/v1/openapi">
| StaticRoute<"/dashboard">
| DynamicRoute<"/data/[id]", { "id": string }>
| StaticRoute<"/data">

View File

@@ -6,13 +6,13 @@ RUN yarn global add pnpm
# DEPS
FROM base as deps
WORKDIR /code
WORKDIR /app
COPY app/prisma app/package.json ./app/
COPY client-libs/typescript/package.json ./client-libs/typescript/
COPY pnpm-lock.yaml pnpm-workspace.yaml ./
COPY prisma ./
RUN cd app && pnpm install --frozen-lockfile
COPY package.json pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile
# BUILDER
FROM base as builder
@@ -25,24 +25,22 @@ ARG NEXT_PUBLIC_SENTRY_DSN
ARG SENTRY_AUTH_TOKEN
ARG NEXT_PUBLIC_FF_SHOW_LOGGED_CALLS
WORKDIR /code
COPY --from=deps /code/node_modules ./node_modules
COPY --from=deps /code/app/node_modules ./app/node_modules
COPY --from=deps /code/client-libs/typescript/node_modules ./client-libs/typescript/node_modules
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
COPY . .
RUN cd app && SKIP_ENV_VALIDATION=1 pnpm build
RUN SKIP_ENV_VALIDATION=1 pnpm build
# RUNNER
FROM base as runner
WORKDIR /code/app
WORKDIR /app
ENV NODE_ENV production
ENV NEXT_TELEMETRY_DISABLED 1
COPY --from=builder /code/ /code/
COPY --from=builder /app/ ./
EXPOSE 3000
ENV PORT 3000
# Run the "run-prod.sh" script
CMD /code/app/run-prod.sh
CMD /app/run-prod.sh

View File

@@ -112,17 +112,17 @@ model ScenarioVariantCell {
model ModelResponse {
id String @id @default(uuid()) @db.Uuid
cacheKey String
requestedAt DateTime?
receivedAt DateTime?
respPayload Json?
cost Float?
inputTokens Int?
outputTokens Int?
statusCode Int?
errorMessage String?
retryTime DateTime?
outdated Boolean @default(false)
cacheKey String
requestedAt DateTime?
receivedAt DateTime?
respPayload Json?
cost Float?
inputTokens Int?
outputTokens Int?
statusCode Int?
errorMessage String?
retryTime DateTime?
outdated Boolean @default(false)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -273,8 +273,8 @@ model LoggedCall {
projectId String @db.Uuid
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
model String?
tags LoggedCallTag[]
model String?
tags LoggedCallTag[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -295,7 +295,7 @@ model LoggedCallModelResponse {
errorMessage String?
requestedAt DateTime
receivedAt DateTime
receivedAt DateTime
// Note: the function to calculate the cacheKey should include the project
// ID so we don't share cached responses between projects, which could be an
@@ -340,8 +340,8 @@ model ApiKey {
name String
apiKey String @unique
projectId String @db.Uuid
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String @db.Uuid
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
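
The schema comment in this file notes that the cache key must fold in the project ID so cached responses are never shared across projects; the external API router later in this diff derives it via hashRequest(projectId, reqPayload). A minimal sketch of what such a helper could look like, assuming a SHA-256 digest (the repo's actual hashObject implementation is not shown here):

import { createHash } from "crypto";

// Hypothetical stand-in for hashRequest from ~/server/utils/hashObject:
// mixing the project ID into the digest keeps cache entries project-scoped.
export const hashRequest = (projectId: string, reqPayload: unknown): string =>
  createHash("sha256")
    .update(JSON.stringify([projectId, reqPayload]))
    .digest("hex");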

View File

@@ -2,7 +2,6 @@ import { prisma } from "~/server/db";
import dedent from "dedent";
import { generateNewCell } from "~/server/utils/generateNewCell";
import { promptConstructorVersion } from "~/promptConstructor/version";
import { env } from "~/env.mjs";
const defaultId = "11111111-1111-1111-1111-111111111111";
@@ -17,16 +16,6 @@ const project =
data: { id: defaultId },
}));
if (env.OPENPIPE_API_KEY) {
await prisma.apiKey.create({
data: {
projectId: project.id,
name: "Default API Key",
apiKey: env.OPENPIPE_API_KEY,
},
});
}
await prisma.experiment.deleteMany({
where: {
id: defaultId,

View File

@@ -33,7 +33,7 @@ export default function OutputCell({
if (!templateHasVariables) disabledReason = "Add a value to the scenario variables to see output";
const [refetchInterval, setRefetchInterval] = useState<number | false>(false);
const [refetchInterval, setRefetchInterval] = useState(0);
const { data: cell, isLoading: queryLoading } = api.scenarioVariantCells.get.useQuery(
{ scenarioId: scenario.id, variantId: variant.id },
{ refetchInterval },
@@ -64,8 +64,7 @@ export default function OutputCell({
cell.retrievalStatus === "PENDING" ||
cell.retrievalStatus === "IN_PROGRESS" ||
hardRefetching;
useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : false), [awaitingOutput]);
useEffect(() => setRefetchInterval(awaitingOutput ? 1000 : 0), [awaitingOutput]);
// TODO: disconnect from socket if we're not streaming anymore
const streamedMessage = useSocket<OutputSchema>(cell?.id);
@@ -121,13 +120,8 @@ export default function OutputCell({
? response.receivedAt.getTime()
: Date.now();
if (response.requestedAt) {
numWaitingMessages = Math.min(
Math.floor(
(relativeWaitingTime - response.requestedAt.getTime()) / WAITING_MESSAGE_INTERVAL,
),
// Don't try to render more than 15, it'll use too much CPU and
// break the page
15,
numWaitingMessages = Math.floor(
(relativeWaitingTime - response.requestedAt.getTime()) / WAITING_MESSAGE_INTERVAL,
);
}
return (
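
Both OutputCell here and VariantStats below switch the polling state from "number | false" to a plain number, using 0 to mean no polling, which react-query likewise treats as disabled. A small self-contained sketch of the pattern (the hook name is illustrative, not the repo's):

import { useEffect, useState } from "react";

// Illustrative hook: poll every second while work is outstanding, stop otherwise.
export function usePollingInterval(awaitingOutput: boolean) {
  const [refetchInterval, setRefetchInterval] = useState(0); // 0 = no polling

  useEffect(() => {
    setRefetchInterval(awaitingOutput ? 1000 : 0);
  }, [awaitingOutput]);

  return refetchInterval; // pass as { refetchInterval } to useQuery
}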

View File

@@ -21,18 +21,14 @@ export default function VariantStats(props: { variant: PromptVariant }) {
outputTokens: 0,
scenarioCount: 0,
outputCount: 0,
awaitingCompletions: false,
awaitingEvals: false,
},
refetchInterval,
},
);
// Poll every five seconds while we are waiting for LLM retrievals to finish
useEffect(
() => setRefetchInterval(data.awaitingCompletions || data.awaitingEvals ? 5000 : 0),
[data.awaitingCompletions, data.awaitingEvals],
);
// Poll every five seconds while we are waiting for LLM retrievals to finish
useEffect(() => setRefetchInterval(data.awaitingEvals ? 5000 : 0), [data.awaitingEvals]);
const [passColor, neutralColor, failColor] = useToken("colors", [
"green.500",

View File

@@ -120,9 +120,9 @@ export const refinementActions: Record<string, RefinementAction> = {
"Convert to function call": {
icon: TfiThought,
description: "Use function calls to get output from the model in a more structured way.",
instructions: `OpenAI functions are a specialized way for an LLM to return its final output.
instructions: `OpenAI functions are a specialized way for an LLM to return output.
Example 1 before:
This is what a prompt looks like before adding a function:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
@@ -139,7 +139,7 @@ export const refinementActions: Record<string, RefinementAction> = {
],
});
Example 1 after:
This is what one looks like after adding a function:
definePrompt("openai/ChatCompletion", {
model: "gpt-4",
@@ -156,7 +156,7 @@ export const refinementActions: Record<string, RefinementAction> = {
],
functions: [
{
name: "log_extracted_sentiment",
name: "extract_sentiment",
parameters: {
type: "object", // parameters must always be an object with a properties key
properties: { // properties key is required
@@ -169,13 +169,13 @@ export const refinementActions: Record<string, RefinementAction> = {
},
],
function_call: {
name: "log_extracted_sentiment",
name: "extract_sentiment",
},
});
=========
Example 2 before:
Here's another example of adding a function:
Before:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
@@ -197,7 +197,7 @@ export const refinementActions: Record<string, RefinementAction> = {
temperature: 0,
});
Example 2 after:
After:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
@@ -215,7 +215,7 @@ export const refinementActions: Record<string, RefinementAction> = {
temperature: 0,
functions: [
{
name: "log_post_score",
name: "score_post",
parameters: {
type: "object",
properties: {
@@ -227,13 +227,13 @@ export const refinementActions: Record<string, RefinementAction> = {
},
],
function_call: {
name: "log_post_score",
name: "score_post",
},
});
=========
Another example
Example 3 before:
Before:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
@@ -246,7 +246,7 @@ export const refinementActions: Record<string, RefinementAction> = {
],
});
Example 3 after:
After:
definePrompt("openai/ChatCompletion", {
model: "gpt-3.5-turbo",
@@ -258,24 +258,21 @@ export const refinementActions: Record<string, RefinementAction> = {
],
functions: [
{
name: "log_translated_text",
name: "write_in_language",
parameters: {
type: "object",
properties: {
translated_text: {
text: {
type: "string",
description: "The text, written in the language specified in the prompt",
},
},
},
},
],
function_call: {
name: "log_translated_text",
name: "write_in_language",
},
});
=========
Add an OpenAI function that takes one or more nested parameters that match the expected output from this prompt.`,
},

View File

@@ -1,14 +1,17 @@
import { type NextApiRequest, type NextApiResponse } from "next";
import cors from "nextjs-cors";
import { createOpenApiNextHandler } from "trpc-openapi";
import { v1ApiRouter } from "~/server/api/external/v1Api.router";
import { createOpenApiContext } from "~/server/api/external/openApiTrpc";
import { createProcedureCache } from "trpc-openapi/dist/adapters/node-http/procedures";
import { appRouter } from "~/server/api/root.router";
import { createTRPCContext } from "~/server/api/trpc";
const openApiHandler = createOpenApiNextHandler({
router: v1ApiRouter,
createContext: createOpenApiContext,
router: appRouter,
createContext: createTRPCContext,
});
const cache = createProcedureCache(appRouter);
const handler = async (req: NextApiRequest, res: NextApiResponse) => {
// Setup CORS
await cors(req, res);
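
Pieced together, this catch-all route now serves the merged appRouter over OpenAPI. A rough sketch of the whole handler under that assumption (the procedure cache and any not-found handling from the unshown tail of the file are omitted):

import { type NextApiRequest, type NextApiResponse } from "next";
import cors from "nextjs-cors";
import { createOpenApiNextHandler } from "trpc-openapi";
import { appRouter } from "~/server/api/root.router";
import { createTRPCContext } from "~/server/api/trpc";

const openApiHandler = createOpenApiNextHandler({
  router: appRouter,
  createContext: createTRPCContext,
});

const handler = async (req: NextApiRequest, res: NextApiResponse) => {
  // Set up CORS so external SDKs can call the API from other origins
  await cors(req, res);
  // Delegate the request to the OpenAPI-over-tRPC handler
  return openApiHandler(req, res);
};

export default handler;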

View File

@@ -1,12 +1,12 @@
import { type NextApiRequest, type NextApiResponse } from "next";
import { generateOpenApiDocument } from "trpc-openapi";
import { v1ApiRouter } from "~/server/api/external/v1Api.router";
import { appRouter } from "~/server/api/root.router";
export const openApiDocument = generateOpenApiDocument(v1ApiRouter, {
export const openApiDocument = generateOpenApiDocument(appRouter, {
title: "OpenPipe API",
description: "The public API for reporting API calls to OpenPipe",
version: "0.1.1",
baseUrl: "https://app.openpipe.ai/api/v1",
version: "0.1.0",
baseUrl: "https://app.openpipe.ai/api",
});
// Respond with our OpenAPI schema
const hander = (req: NextApiRequest, res: NextApiResponse) => {

View File

@@ -0,0 +1,6 @@
// A faulty API route to test Sentry's error monitoring
// @ts-expect-error just a test file, don't care about types
export default function handler(_req, res) {
throw new Error("Sentry Example API Route Error");
res.status(200).json({ name: "John Doe" });
}

View File

@@ -1,95 +0,0 @@
import type { ApiKey, Project } from "@prisma/client";
import { TRPCError, initTRPC } from "@trpc/server";
import { type CreateNextContextOptions } from "@trpc/server/adapters/next";
import superjson from "superjson";
import { type OpenApiMeta } from "trpc-openapi";
import { ZodError } from "zod";
import { prisma } from "~/server/db";
type CreateContextOptions = {
key:
| (ApiKey & {
project: Project;
})
| null;
};
/**
* This helper generates the "internals" for a tRPC context. If you need to use it, you can export
* it from here.
*
* Examples of things you may need it for:
* - testing, so we don't have to mock Next.js' req/res
* - tRPC's `createSSGHelpers`, where we don't have req/res
*
* @see https://create.t3.gg/en/usage/trpc#-serverapitrpcts
*/
export const createInnerTRPCContext = (opts: CreateContextOptions) => {
return {
key: opts.key,
};
};
export const createOpenApiContext = async (opts: CreateNextContextOptions) => {
const { req, res } = opts;
const apiKey = req.headers.authorization?.split(" ")[1] as string | null;
if (!apiKey) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
const key = await prisma.apiKey.findUnique({
where: { apiKey },
include: { project: true },
});
if (!key) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
return createInnerTRPCContext({
key,
});
};
export type TRPCContext = Awaited<ReturnType<typeof createOpenApiContext>>;
const t = initTRPC
.context<typeof createOpenApiContext>()
.meta<OpenApiMeta>()
.create({
transformer: superjson,
errorFormatter({ shape, error }) {
return {
...shape,
data: {
...shape.data,
zodError: error.cause instanceof ZodError ? error.cause.flatten() : null,
},
};
},
});
export const createOpenApiRouter = t.router;
export const openApiPublicProc = t.procedure;
/** Reusable middleware that enforces users are logged in before running the procedure. */
const enforceApiKey = t.middleware(async ({ ctx, next }) => {
if (!ctx.key) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
return next({
ctx: { key: ctx.key },
});
});
/**
* Protected (authenticated) procedure
*
* If you want a query or mutation to ONLY be accessible to logged in users, use this. It verifies
* the session is valid and guarantees `ctx.session.user` is not null.
*
* @see https://trpc.io/docs/procedures
*/
export const openApiProtectedProc = t.procedure.use(enforceApiKey);

View File

@@ -8,6 +8,7 @@ import { evaluationsRouter } from "./routers/evaluations.router";
import { worldChampsRouter } from "./routers/worldChamps.router";
import { datasetsRouter } from "./routers/datasets.router";
import { datasetEntries } from "./routers/datasetEntries.router";
import { externalApiRouter } from "./routers/externalApi.router";
import { projectsRouter } from "./routers/projects.router";
import { dashboardRouter } from "./routers/dashboard.router";
import { loggedCallsRouter } from "./routers/loggedCalls.router";
@@ -30,6 +31,7 @@ export const appRouter = createTRPCRouter({
projects: projectsRouter,
dashboard: dashboardRouter,
loggedCalls: loggedCallsRouter,
externalApi: externalApiRouter,
});
// export type definition of API

View File

@@ -2,6 +2,9 @@ import { type Prisma } from "@prisma/client";
import { type JsonValue } from "type-fest";
import { z } from "zod";
import { v4 as uuidv4 } from "uuid";
import { TRPCError } from "@trpc/server";
import { createTRPCRouter, publicProcedure } from "~/server/api/trpc";
import { prisma } from "~/server/db";
import { hashRequest } from "~/server/utils/hashObject";
import modelProvider from "~/modelProviders/openai-ChatCompletion";
@@ -9,7 +12,6 @@ import {
type ChatCompletion,
type CompletionCreateParams,
} from "openai/resources/chat/completions";
import { createOpenApiRouter, openApiProtectedProc } from "./openApiTrpc";
const reqValidator = z.object({
model: z.string(),
@@ -26,12 +28,12 @@ const respValidator = z.object({
),
});
export const v1ApiRouter = createOpenApiRouter({
checkCache: openApiProtectedProc
export const externalApiRouter = createTRPCRouter({
checkCache: publicProcedure
.meta({
openapi: {
method: "POST",
path: "/check-cache",
path: "/v1/check-cache",
description: "Check if a prompt is cached",
protect: true,
},
@@ -45,8 +47,7 @@ export const v1ApiRouter = createOpenApiRouter({
.optional()
.describe(
'Extra tags to attach to the call for filtering. Eg { "userId": "123", "promptId": "populate-title" }',
)
.default({}),
),
}),
)
.output(
@@ -55,8 +56,18 @@ export const v1ApiRouter = createOpenApiRouter({
}),
)
.mutation(async ({ input, ctx }) => {
const apiKey = ctx.apiKey;
if (!apiKey) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
const key = await prisma.apiKey.findUnique({
where: { apiKey },
});
if (!key) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
const reqPayload = await reqValidator.spa(input.reqPayload);
const cacheKey = hashRequest(ctx.key.projectId, reqPayload as JsonValue);
const cacheKey = hashRequest(key.projectId, reqPayload as JsonValue);
const existingResponse = await prisma.loggedCallModelResponse.findFirst({
where: { cacheKey },
@@ -68,24 +79,23 @@ export const v1ApiRouter = createOpenApiRouter({
await prisma.loggedCall.create({
data: {
projectId: ctx.key.projectId,
projectId: key.projectId,
requestedAt: new Date(input.requestedAt),
cacheHit: true,
modelResponseId: existingResponse.id,
},
});
await createTags(existingResponse.originalLoggedCallId, input.tags);
return {
respPayload: existingResponse.respPayload,
};
}),
report: openApiProtectedProc
report: publicProcedure
.meta({
openapi: {
method: "POST",
path: "/report",
path: "/v1/report",
description: "Report an API call",
protect: true,
},
@@ -103,16 +113,26 @@ export const v1ApiRouter = createOpenApiRouter({
.optional()
.describe(
'Extra tags to attach to the call for filtering. Eg { "userId": "123", "promptId": "populate-title" }',
)
.default({}),
),
}),
)
.output(z.void())
.mutation(async ({ input, ctx }) => {
console.log("GOT TAGS", input.tags);
const apiKey = ctx.apiKey;
if (!apiKey) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
const key = await prisma.apiKey.findUnique({
where: { apiKey },
});
if (!key) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
const reqPayload = await reqValidator.spa(input.reqPayload);
const respPayload = await respValidator.spa(input.respPayload);
const requestHash = hashRequest(ctx.key.projectId, reqPayload as JsonValue);
const requestHash = hashRequest(key.projectId, reqPayload as JsonValue);
const newLoggedCallId = uuidv4();
const newModelResponseId = uuidv4();
@@ -131,7 +151,7 @@ export const v1ApiRouter = createOpenApiRouter({
prisma.loggedCall.create({
data: {
id: newLoggedCallId,
projectId: ctx.key.projectId,
projectId: key.projectId,
requestedAt: new Date(input.requestedAt),
cacheHit: false,
model,
@@ -165,76 +185,14 @@ export const v1ApiRouter = createOpenApiRouter({
}),
]);
await createTags(newLoggedCallId, input.tags);
}),
localTestingOnlyGetLatestLoggedCall: openApiProtectedProc
.meta({
openapi: {
method: "GET",
path: "/local-testing-only-get-latest-logged-call",
description: "Get the latest logged call (only for local testing)",
protect: true, // Make sure to protect this endpoint
},
})
.input(z.void())
.output(
z
.object({
createdAt: z.date(),
cacheHit: z.boolean(),
tags: z.record(z.string().nullable()),
modelResponse: z
.object({
id: z.string(),
statusCode: z.number().nullable(),
errorMessage: z.string().nullable(),
reqPayload: z.unknown(),
respPayload: z.unknown(),
})
.nullable(),
})
.nullable(),
)
.mutation(async ({ ctx }) => {
if (process.env.NODE_ENV === "production") {
throw new Error("This operation is not allowed in production environment");
}
const latestLoggedCall = await prisma.loggedCall.findFirst({
where: { projectId: ctx.key.projectId },
orderBy: { requestedAt: "desc" },
select: {
createdAt: true,
cacheHit: true,
tags: true,
modelResponse: {
select: {
id: true,
statusCode: true,
errorMessage: true,
reqPayload: true,
respPayload: true,
},
},
},
const tagsToCreate = Object.entries(input.tags ?? {}).map(([name, value]) => ({
loggedCallId: newLoggedCallId,
// sanitize tags
name: name.replaceAll(/[^a-zA-Z0-9_]/g, "_"),
value,
}));
await prisma.loggedCallTag.createMany({
data: tagsToCreate,
});
return (
latestLoggedCall && {
...latestLoggedCall,
tags: Object.fromEntries(latestLoggedCall.tags.map((tag) => [tag.name, tag.value])),
}
);
}),
});
async function createTags(loggedCallId: string, tags: Record<string, string>) {
const tagsToCreate = Object.entries(tags).map(([name, value]) => ({
loggedCallId,
name: name.replaceAll(/[^a-zA-Z0-9_$]/g, "_"),
value,
}));
await prisma.loggedCallTag.createMany({
data: tagsToCreate,
});
}
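
A quick illustration of the sanitization in the new createTags helper above (the example inputs are made up): any character outside [a-zA-Z0-9_$] in a tag name is replaced with an underscore before the rows are written.

const sanitizeTagName = (name: string) => name.replaceAll(/[^a-zA-Z0-9_$]/g, "_");

sanitizeTagName("prompt-id");     // => "prompt_id"
sanitizeTagName("user.id");       // => "user_id"
sanitizeTagName("$sdk_version");  // => "$sdk_version" (already valid)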

View File

@@ -131,8 +131,6 @@ export const promptVariantsRouter = createTRPCRouter({
const inputTokens = overallTokens._sum?.inputTokens ?? 0;
const outputTokens = overallTokens._sum?.outputTokens ?? 0;
const awaitingCompletions = outputCount < scenarioCount;
const awaitingEvals = !!evalResults.find(
(result) => result.totalCount < scenarioCount * evals.length,
);
@@ -144,7 +142,6 @@ export const promptVariantsRouter = createTRPCRouter({
overallCost: overallTokens._sum?.cost ?? 0,
scenarioCount,
outputCount,
awaitingCompletions,
awaitingEvals,
};
}),

View File

@@ -27,6 +27,7 @@ import { capturePath } from "~/utils/analytics/serverAnalytics";
type CreateContextOptions = {
session: Session | null;
apiKey: string | null;
};
// eslint-disable-next-line @typescript-eslint/no-empty-function
@@ -45,6 +46,7 @@ const noOp = () => {};
export const createInnerTRPCContext = (opts: CreateContextOptions) => {
return {
session: opts.session,
apiKey: opts.apiKey,
prisma,
markAccessControlRun: noOp,
};
@@ -62,8 +64,11 @@ export const createTRPCContext = async (opts: CreateNextContextOptions) => {
// Get the session from the server using the getServerSession wrapper function
const session = await getServerAuthSession({ req, res });
const apiKey = req.headers.authorization?.split(" ")[1] as string | null;
return createInnerTRPCContext({
session,
apiKey,
});
};
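
With the API key now read off the Authorization header in the shared tRPC context, an external caller would hit the relocated endpoints roughly like this; the key value is a placeholder and the body fields follow the /v1/report schema elsewhere in this changeset:

await fetch("https://app.openpipe.ai/api/v1/report", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    // ctx.apiKey is taken from the second token of this header
    Authorization: "Bearer opc_your_api_key_here",
  },
  body: JSON.stringify({
    requestedAt: Date.now(),
    receivedAt: Date.now(),
    reqPayload: { model: "gpt-3.5-turbo", messages: [] },
    statusCode: 200,
    tags: { promptId: "populate-title" },
  }),
});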

View File

@@ -1,5 +1,5 @@
import "dotenv/config";
import { openApiDocument } from "~/pages/api/v1/openapi.json";
import { openApiDocument } from "~/pages/api/openapi.json";
import fs from "fs";
import path from "path";
import { execSync } from "child_process";

View File

@@ -51,7 +51,7 @@ const requestUpdatedPromptFunction = async (
originalModelProvider.inputSchema,
null,
2,
)}`,
)}\n\nDo not add any assistant messages.`,
},
{
role: "user",

View File

@@ -2,4 +2,4 @@ import cryptoRandomString from "crypto-random-string";
const KEY_LENGTH = 42;
export const generateApiKey = () => `opk_${cryptoRandomString({ length: KEY_LENGTH })}`;
export const generateApiKey = () => `opc_${cryptoRandomString({ length: KEY_LENGTH })}`;

View File

@@ -1,9 +0,0 @@
#! /bin/bash
set -e
cd "$(dirname "$0")/.."
source app/.env
docker build . --file app/Dockerfile

View File

@@ -3,17 +3,17 @@
"info": {
"title": "OpenPipe API",
"description": "The public API for reporting API calls to OpenPipe",
"version": "0.1.1"
"version": "0.1.0"
},
"servers": [
{
"url": "https://app.openpipe.ai/api/v1"
"url": "https://app.openpipe.ai/api"
}
],
"paths": {
"/check-cache": {
"/v1/check-cache": {
"post": {
"operationId": "checkCache",
"operationId": "externalApi-checkCache",
"description": "Check if a prompt is cached",
"security": [
{
@@ -39,8 +39,7 @@
"additionalProperties": {
"type": "string"
},
"description": "Extra tags to attach to the call for filtering. Eg { \"userId\": \"123\", \"promptId\": \"populate-title\" }",
"default": {}
"description": "Extra tags to attach to the call for filtering. Eg { \"userId\": \"123\", \"promptId\": \"populate-title\" }"
}
},
"required": [
@@ -75,9 +74,9 @@
}
}
},
"/report": {
"/v1/report": {
"post": {
"operationId": "report",
"operationId": "externalApi-report",
"description": "Report an API call",
"security": [
{
@@ -118,8 +117,7 @@
"additionalProperties": {
"type": "string"
},
"description": "Extra tags to attach to the call for filtering. Eg { \"userId\": \"123\", \"promptId\": \"populate-title\" }",
"default": {}
"description": "Extra tags to attach to the call for filtering. Eg { \"userId\": \"123\", \"promptId\": \"populate-title\" }"
}
},
"required": [
@@ -146,82 +144,6 @@
}
}
}
},
"/local-testing-only-get-latest-logged-call": {
"get": {
"operationId": "localTestingOnlyGetLatestLoggedCall",
"description": "Get the latest logged call (only for local testing)",
"security": [
{
"Authorization": []
}
],
"parameters": [],
"responses": {
"200": {
"description": "Successful response",
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
"createdAt": {
"type": "string",
"format": "date-time"
},
"cacheHit": {
"type": "boolean"
},
"tags": {
"type": "object",
"additionalProperties": {
"type": "string",
"nullable": true
}
},
"modelResponse": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"statusCode": {
"type": "number",
"nullable": true
},
"errorMessage": {
"type": "string",
"nullable": true
},
"reqPayload": {},
"respPayload": {}
},
"required": [
"id",
"statusCode",
"errorMessage"
],
"additionalProperties": false,
"nullable": true
}
},
"required": [
"createdAt",
"cacheHit",
"tags",
"modelResponse"
],
"additionalProperties": false,
"nullable": true
}
}
}
},
"default": {
"$ref": "#/components/responses/error"
}
}
}
}
},
"components": {

View File

@@ -5,14 +5,14 @@ import httpx
from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.check_cache_json_body import CheckCacheJsonBody
from ...models.check_cache_response_200 import CheckCacheResponse200
from ...models.external_api_check_cache_json_body import ExternalApiCheckCacheJsonBody
from ...models.external_api_check_cache_response_200 import ExternalApiCheckCacheResponse200
from ...types import Response
def _get_kwargs(
*,
json_body: CheckCacheJsonBody,
json_body: ExternalApiCheckCacheJsonBody,
) -> Dict[str, Any]:
pass
@@ -20,16 +20,16 @@ def _get_kwargs(
return {
"method": "post",
"url": "/check-cache",
"url": "/v1/check-cache",
"json": json_json_body,
}
def _parse_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Optional[CheckCacheResponse200]:
) -> Optional[ExternalApiCheckCacheResponse200]:
if response.status_code == HTTPStatus.OK:
response_200 = CheckCacheResponse200.from_dict(response.json())
response_200 = ExternalApiCheckCacheResponse200.from_dict(response.json())
return response_200
if client.raise_on_unexpected_status:
@@ -40,7 +40,7 @@ def _parse_response(
def _build_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Response[CheckCacheResponse200]:
) -> Response[ExternalApiCheckCacheResponse200]:
return Response(
status_code=HTTPStatus(response.status_code),
content=response.content,
@@ -52,19 +52,19 @@ def _build_response(
def sync_detailed(
*,
client: AuthenticatedClient,
json_body: CheckCacheJsonBody,
) -> Response[CheckCacheResponse200]:
json_body: ExternalApiCheckCacheJsonBody,
) -> Response[ExternalApiCheckCacheResponse200]:
"""Check if a prompt is cached
Args:
json_body (CheckCacheJsonBody):
json_body (ExternalApiCheckCacheJsonBody):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[CheckCacheResponse200]
Response[ExternalApiCheckCacheResponse200]
"""
kwargs = _get_kwargs(
@@ -81,19 +81,19 @@ def sync_detailed(
def sync(
*,
client: AuthenticatedClient,
json_body: CheckCacheJsonBody,
) -> Optional[CheckCacheResponse200]:
json_body: ExternalApiCheckCacheJsonBody,
) -> Optional[ExternalApiCheckCacheResponse200]:
"""Check if a prompt is cached
Args:
json_body (CheckCacheJsonBody):
json_body (ExternalApiCheckCacheJsonBody):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
CheckCacheResponse200
ExternalApiCheckCacheResponse200
"""
return sync_detailed(
@@ -105,19 +105,19 @@ def sync(
async def asyncio_detailed(
*,
client: AuthenticatedClient,
json_body: CheckCacheJsonBody,
) -> Response[CheckCacheResponse200]:
json_body: ExternalApiCheckCacheJsonBody,
) -> Response[ExternalApiCheckCacheResponse200]:
"""Check if a prompt is cached
Args:
json_body (CheckCacheJsonBody):
json_body (ExternalApiCheckCacheJsonBody):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[CheckCacheResponse200]
Response[ExternalApiCheckCacheResponse200]
"""
kwargs = _get_kwargs(
@@ -132,19 +132,19 @@ async def asyncio_detailed(
async def asyncio(
*,
client: AuthenticatedClient,
json_body: CheckCacheJsonBody,
) -> Optional[CheckCacheResponse200]:
json_body: ExternalApiCheckCacheJsonBody,
) -> Optional[ExternalApiCheckCacheResponse200]:
"""Check if a prompt is cached
Args:
json_body (CheckCacheJsonBody):
json_body (ExternalApiCheckCacheJsonBody):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
CheckCacheResponse200
ExternalApiCheckCacheResponse200
"""
return (

View File

@@ -5,13 +5,13 @@ import httpx
from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.report_json_body import ReportJsonBody
from ...models.external_api_report_json_body import ExternalApiReportJsonBody
from ...types import Response
def _get_kwargs(
*,
json_body: ReportJsonBody,
json_body: ExternalApiReportJsonBody,
) -> Dict[str, Any]:
pass
@@ -19,7 +19,7 @@ def _get_kwargs(
return {
"method": "post",
"url": "/report",
"url": "/v1/report",
"json": json_json_body,
}
@@ -45,12 +45,12 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt
def sync_detailed(
*,
client: AuthenticatedClient,
json_body: ReportJsonBody,
json_body: ExternalApiReportJsonBody,
) -> Response[Any]:
"""Report an API call
Args:
json_body (ReportJsonBody):
json_body (ExternalApiReportJsonBody):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -74,12 +74,12 @@ def sync_detailed(
async def asyncio_detailed(
*,
client: AuthenticatedClient,
json_body: ReportJsonBody,
json_body: ExternalApiReportJsonBody,
) -> Response[Any]:
"""Report an API call
Args:
json_body (ReportJsonBody):
json_body (ExternalApiReportJsonBody):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.

View File

@@ -1,133 +0,0 @@
from http import HTTPStatus
from typing import Any, Dict, Optional, Union
import httpx
from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.local_testing_only_get_latest_logged_call_response_200 import (
LocalTestingOnlyGetLatestLoggedCallResponse200,
)
from ...types import Response
def _get_kwargs() -> Dict[str, Any]:
pass
return {
"method": "get",
"url": "/local-testing-only-get-latest-logged-call",
}
def _parse_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Optional[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]:
if response.status_code == HTTPStatus.OK:
_response_200 = response.json()
response_200: Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]
if _response_200 is None:
response_200 = None
else:
response_200 = LocalTestingOnlyGetLatestLoggedCallResponse200.from_dict(_response_200)
return response_200
if client.raise_on_unexpected_status:
raise errors.UnexpectedStatus(response.status_code, response.content)
else:
return None
def _build_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Response[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]:
return Response(
status_code=HTTPStatus(response.status_code),
content=response.content,
headers=response.headers,
parsed=_parse_response(client=client, response=response),
)
def sync_detailed(
*,
client: AuthenticatedClient,
) -> Response[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]:
"""Get the latest logged call (only for local testing)
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]
"""
kwargs = _get_kwargs()
response = client.get_httpx_client().request(
**kwargs,
)
return _build_response(client=client, response=response)
def sync(
*,
client: AuthenticatedClient,
) -> Optional[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]:
"""Get the latest logged call (only for local testing)
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]
"""
return sync_detailed(
client=client,
).parsed
async def asyncio_detailed(
*,
client: AuthenticatedClient,
) -> Response[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]:
"""Get the latest logged call (only for local testing)
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]
"""
kwargs = _get_kwargs()
response = await client.get_async_httpx_client().request(**kwargs)
return _build_response(client=client, response=response)
async def asyncio(
*,
client: AuthenticatedClient,
) -> Optional[Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]]:
"""Get the latest logged call (only for local testing)
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Optional[LocalTestingOnlyGetLatestLoggedCallResponse200]
"""
return (
await asyncio_detailed(
client=client,
)
).parsed

View File

@@ -1,25 +1,15 @@
""" Contains all the data models used in inputs/outputs """
from .check_cache_json_body import CheckCacheJsonBody
from .check_cache_json_body_tags import CheckCacheJsonBodyTags
from .check_cache_response_200 import CheckCacheResponse200
from .local_testing_only_get_latest_logged_call_response_200 import LocalTestingOnlyGetLatestLoggedCallResponse200
from .local_testing_only_get_latest_logged_call_response_200_model_response import (
LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse,
)
from .local_testing_only_get_latest_logged_call_response_200_tags import (
LocalTestingOnlyGetLatestLoggedCallResponse200Tags,
)
from .report_json_body import ReportJsonBody
from .report_json_body_tags import ReportJsonBodyTags
from .external_api_check_cache_json_body import ExternalApiCheckCacheJsonBody
from .external_api_check_cache_json_body_tags import ExternalApiCheckCacheJsonBodyTags
from .external_api_check_cache_response_200 import ExternalApiCheckCacheResponse200
from .external_api_report_json_body import ExternalApiReportJsonBody
from .external_api_report_json_body_tags import ExternalApiReportJsonBodyTags
__all__ = (
"CheckCacheJsonBody",
"CheckCacheJsonBodyTags",
"CheckCacheResponse200",
"LocalTestingOnlyGetLatestLoggedCallResponse200",
"LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse",
"LocalTestingOnlyGetLatestLoggedCallResponse200Tags",
"ReportJsonBody",
"ReportJsonBodyTags",
"ExternalApiCheckCacheJsonBody",
"ExternalApiCheckCacheJsonBodyTags",
"ExternalApiCheckCacheResponse200",
"ExternalApiReportJsonBody",
"ExternalApiReportJsonBodyTags",
)

View File

@@ -5,25 +5,25 @@ from attrs import define
from ..types import UNSET, Unset
if TYPE_CHECKING:
from ..models.check_cache_json_body_tags import CheckCacheJsonBodyTags
from ..models.external_api_check_cache_json_body_tags import ExternalApiCheckCacheJsonBodyTags
T = TypeVar("T", bound="CheckCacheJsonBody")
T = TypeVar("T", bound="ExternalApiCheckCacheJsonBody")
@define
class CheckCacheJsonBody:
class ExternalApiCheckCacheJsonBody:
"""
Attributes:
requested_at (float): Unix timestamp in milliseconds
req_payload (Union[Unset, Any]): JSON-encoded request payload
tags (Union[Unset, CheckCacheJsonBodyTags]): Extra tags to attach to the call for filtering. Eg { "userId":
"123", "promptId": "populate-title" }
tags (Union[Unset, ExternalApiCheckCacheJsonBodyTags]): Extra tags to attach to the call for filtering. Eg {
"userId": "123", "promptId": "populate-title" }
"""
requested_at: float
req_payload: Union[Unset, Any] = UNSET
tags: Union[Unset, "CheckCacheJsonBodyTags"] = UNSET
tags: Union[Unset, "ExternalApiCheckCacheJsonBodyTags"] = UNSET
def to_dict(self) -> Dict[str, Any]:
requested_at = self.requested_at
@@ -47,7 +47,7 @@ class CheckCacheJsonBody:
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.check_cache_json_body_tags import CheckCacheJsonBodyTags
from ..models.external_api_check_cache_json_body_tags import ExternalApiCheckCacheJsonBodyTags
d = src_dict.copy()
requested_at = d.pop("requestedAt")
@@ -55,16 +55,16 @@ class CheckCacheJsonBody:
req_payload = d.pop("reqPayload", UNSET)
_tags = d.pop("tags", UNSET)
tags: Union[Unset, CheckCacheJsonBodyTags]
tags: Union[Unset, ExternalApiCheckCacheJsonBodyTags]
if isinstance(_tags, Unset):
tags = UNSET
else:
tags = CheckCacheJsonBodyTags.from_dict(_tags)
tags = ExternalApiCheckCacheJsonBodyTags.from_dict(_tags)
check_cache_json_body = cls(
external_api_check_cache_json_body = cls(
requested_at=requested_at,
req_payload=req_payload,
tags=tags,
)
return check_cache_json_body
return external_api_check_cache_json_body

View File

@@ -2,11 +2,11 @@ from typing import Any, Dict, List, Type, TypeVar
from attrs import define, field
T = TypeVar("T", bound="CheckCacheJsonBodyTags")
T = TypeVar("T", bound="ExternalApiCheckCacheJsonBodyTags")
@define
class CheckCacheJsonBodyTags:
class ExternalApiCheckCacheJsonBodyTags:
"""Extra tags to attach to the call for filtering. Eg { "userId": "123", "promptId": "populate-title" }"""
additional_properties: Dict[str, str] = field(init=False, factory=dict)
@@ -21,10 +21,10 @@ class CheckCacheJsonBodyTags:
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
check_cache_json_body_tags = cls()
external_api_check_cache_json_body_tags = cls()
check_cache_json_body_tags.additional_properties = d
return check_cache_json_body_tags
external_api_check_cache_json_body_tags.additional_properties = d
return external_api_check_cache_json_body_tags
@property
def additional_keys(self) -> List[str]:

View File

@@ -4,11 +4,11 @@ from attrs import define
from ..types import UNSET, Unset
T = TypeVar("T", bound="CheckCacheResponse200")
T = TypeVar("T", bound="ExternalApiCheckCacheResponse200")
@define
class CheckCacheResponse200:
class ExternalApiCheckCacheResponse200:
"""
Attributes:
resp_payload (Union[Unset, Any]): JSON-encoded response payload
@@ -31,8 +31,8 @@ class CheckCacheResponse200:
d = src_dict.copy()
resp_payload = d.pop("respPayload", UNSET)
check_cache_response_200 = cls(
external_api_check_cache_response_200 = cls(
resp_payload=resp_payload,
)
return check_cache_response_200
return external_api_check_cache_response_200

View File

@@ -5,14 +5,14 @@ from attrs import define
from ..types import UNSET, Unset
if TYPE_CHECKING:
from ..models.report_json_body_tags import ReportJsonBodyTags
from ..models.external_api_report_json_body_tags import ExternalApiReportJsonBodyTags
T = TypeVar("T", bound="ReportJsonBody")
T = TypeVar("T", bound="ExternalApiReportJsonBody")
@define
class ReportJsonBody:
class ExternalApiReportJsonBody:
"""
Attributes:
requested_at (float): Unix timestamp in milliseconds
@@ -21,8 +21,8 @@ class ReportJsonBody:
resp_payload (Union[Unset, Any]): JSON-encoded response payload
status_code (Union[Unset, float]): HTTP status code of response
error_message (Union[Unset, str]): User-friendly error message
tags (Union[Unset, ReportJsonBodyTags]): Extra tags to attach to the call for filtering. Eg { "userId": "123",
"promptId": "populate-title" }
tags (Union[Unset, ExternalApiReportJsonBodyTags]): Extra tags to attach to the call for filtering. Eg {
"userId": "123", "promptId": "populate-title" }
"""
requested_at: float
@@ -31,7 +31,7 @@ class ReportJsonBody:
resp_payload: Union[Unset, Any] = UNSET
status_code: Union[Unset, float] = UNSET
error_message: Union[Unset, str] = UNSET
tags: Union[Unset, "ReportJsonBodyTags"] = UNSET
tags: Union[Unset, "ExternalApiReportJsonBodyTags"] = UNSET
def to_dict(self) -> Dict[str, Any]:
requested_at = self.requested_at
@@ -66,7 +66,7 @@ class ReportJsonBody:
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.report_json_body_tags import ReportJsonBodyTags
from ..models.external_api_report_json_body_tags import ExternalApiReportJsonBodyTags
d = src_dict.copy()
requested_at = d.pop("requestedAt")
@@ -82,13 +82,13 @@ class ReportJsonBody:
error_message = d.pop("errorMessage", UNSET)
_tags = d.pop("tags", UNSET)
tags: Union[Unset, ReportJsonBodyTags]
tags: Union[Unset, ExternalApiReportJsonBodyTags]
if isinstance(_tags, Unset):
tags = UNSET
else:
tags = ReportJsonBodyTags.from_dict(_tags)
tags = ExternalApiReportJsonBodyTags.from_dict(_tags)
report_json_body = cls(
external_api_report_json_body = cls(
requested_at=requested_at,
received_at=received_at,
req_payload=req_payload,
@@ -98,4 +98,4 @@ class ReportJsonBody:
tags=tags,
)
return report_json_body
return external_api_report_json_body

View File

@@ -2,11 +2,11 @@ from typing import Any, Dict, List, Type, TypeVar
from attrs import define, field
T = TypeVar("T", bound="ReportJsonBodyTags")
T = TypeVar("T", bound="ExternalApiReportJsonBodyTags")
@define
class ReportJsonBodyTags:
class ExternalApiReportJsonBodyTags:
"""Extra tags to attach to the call for filtering. Eg { "userId": "123", "promptId": "populate-title" }"""
additional_properties: Dict[str, str] = field(init=False, factory=dict)
@@ -21,10 +21,10 @@ class ReportJsonBodyTags:
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
report_json_body_tags = cls()
external_api_report_json_body_tags = cls()
report_json_body_tags.additional_properties = d
return report_json_body_tags
external_api_report_json_body_tags.additional_properties = d
return external_api_report_json_body_tags
@property
def additional_keys(self) -> List[str]:

View File

@@ -1,84 +0,0 @@
import datetime
from typing import TYPE_CHECKING, Any, Dict, Optional, Type, TypeVar
from attrs import define
from dateutil.parser import isoparse
if TYPE_CHECKING:
from ..models.local_testing_only_get_latest_logged_call_response_200_model_response import (
LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse,
)
from ..models.local_testing_only_get_latest_logged_call_response_200_tags import (
LocalTestingOnlyGetLatestLoggedCallResponse200Tags,
)
T = TypeVar("T", bound="LocalTestingOnlyGetLatestLoggedCallResponse200")
@define
class LocalTestingOnlyGetLatestLoggedCallResponse200:
"""
Attributes:
created_at (datetime.datetime):
cache_hit (bool):
tags (LocalTestingOnlyGetLatestLoggedCallResponse200Tags):
model_response (Optional[LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse]):
"""
created_at: datetime.datetime
cache_hit: bool
tags: "LocalTestingOnlyGetLatestLoggedCallResponse200Tags"
model_response: Optional["LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse"]
def to_dict(self) -> Dict[str, Any]:
created_at = self.created_at.isoformat()
cache_hit = self.cache_hit
tags = self.tags.to_dict()
model_response = self.model_response.to_dict() if self.model_response else None
field_dict: Dict[str, Any] = {}
field_dict.update(
{
"createdAt": created_at,
"cacheHit": cache_hit,
"tags": tags,
"modelResponse": model_response,
}
)
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.local_testing_only_get_latest_logged_call_response_200_model_response import (
LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse,
)
from ..models.local_testing_only_get_latest_logged_call_response_200_tags import (
LocalTestingOnlyGetLatestLoggedCallResponse200Tags,
)
d = src_dict.copy()
created_at = isoparse(d.pop("createdAt"))
cache_hit = d.pop("cacheHit")
tags = LocalTestingOnlyGetLatestLoggedCallResponse200Tags.from_dict(d.pop("tags"))
_model_response = d.pop("modelResponse")
model_response: Optional[LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse]
if _model_response is None:
model_response = None
else:
model_response = LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse.from_dict(_model_response)
local_testing_only_get_latest_logged_call_response_200 = cls(
created_at=created_at,
cache_hit=cache_hit,
tags=tags,
model_response=model_response,
)
return local_testing_only_get_latest_logged_call_response_200

View File

@@ -1,70 +0,0 @@
from typing import Any, Dict, Optional, Type, TypeVar, Union
from attrs import define
from ..types import UNSET, Unset
T = TypeVar("T", bound="LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse")
@define
class LocalTestingOnlyGetLatestLoggedCallResponse200ModelResponse:
"""
Attributes:
id (str):
status_code (Optional[float]):
error_message (Optional[str]):
req_payload (Union[Unset, Any]):
resp_payload (Union[Unset, Any]):
"""
id: str
status_code: Optional[float]
error_message: Optional[str]
req_payload: Union[Unset, Any] = UNSET
resp_payload: Union[Unset, Any] = UNSET
def to_dict(self) -> Dict[str, Any]:
id = self.id
status_code = self.status_code
error_message = self.error_message
req_payload = self.req_payload
resp_payload = self.resp_payload
field_dict: Dict[str, Any] = {}
field_dict.update(
{
"id": id,
"statusCode": status_code,
"errorMessage": error_message,
}
)
if req_payload is not UNSET:
field_dict["reqPayload"] = req_payload
if resp_payload is not UNSET:
field_dict["respPayload"] = resp_payload
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
id = d.pop("id")
status_code = d.pop("statusCode")
error_message = d.pop("errorMessage")
req_payload = d.pop("reqPayload", UNSET)
resp_payload = d.pop("respPayload", UNSET)
local_testing_only_get_latest_logged_call_response_200_model_response = cls(
id=id,
status_code=status_code,
error_message=error_message,
req_payload=req_payload,
resp_payload=resp_payload,
)
return local_testing_only_get_latest_logged_call_response_200_model_response

View File

@@ -1,43 +0,0 @@
from typing import Any, Dict, List, Optional, Type, TypeVar
from attrs import define, field
T = TypeVar("T", bound="LocalTestingOnlyGetLatestLoggedCallResponse200Tags")
@define
class LocalTestingOnlyGetLatestLoggedCallResponse200Tags:
""" """
additional_properties: Dict[str, Optional[str]] = field(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
local_testing_only_get_latest_logged_call_response_200_tags = cls()
local_testing_only_get_latest_logged_call_response_200_tags.additional_properties = d
return local_testing_only_get_latest_logged_call_response_200_tags
@property
def additional_keys(self) -> List[str]:
return list(self.additional_properties.keys())
def __getitem__(self, key: str) -> Optional[str]:
return self.additional_properties[key]
def __setitem__(self, key: str, value: Optional[str]) -> None:
self.additional_properties[key] = value
def __delitem__(self, key: str) -> None:
del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties

View File

@@ -1,9 +1,9 @@
from typing import Any, Optional
def merge_openai_chunks(base: Optional[Any], chunk: Any) -> Any:
def merge_streamed_chunks(base: Optional[Any], chunk: Any) -> Any:
if base is None:
return merge_openai_chunks({**chunk, "choices": []}, chunk)
return merge_streamed_chunks({**chunk, "choices": []}, chunk)
choices = base["choices"].copy()
for choice in chunk["choices"]:
@@ -34,7 +34,9 @@ def merge_openai_chunks(base: Optional[Any], chunk: Any) -> Any:
{**new_choice, "message": {"role": "assistant", **choice["delta"]}}
)
return {
merged = {
**base,
"choices": choices,
}
return merged
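
merge_streamed_chunks folds a stream of chat-completion chunks into a single response by appending each choice's delta onto an accumulated message. The same idea in a compact TypeScript sketch (types simplified; this is not the repo's actual implementation):

type Delta = { role?: string; content?: string };
type Chunk = { id: string; choices: { index: number; delta: Delta }[] };
type Merged = { id: string; choices: { index: number; message: Delta }[] };

function mergeStreamedChunks(base: Merged | null, chunk: Chunk): Merged {
  const merged: Merged = base ?? { id: chunk.id, choices: [] };
  for (const choice of chunk.choices) {
    const existing = merged.choices.find((c) => c.index === choice.index);
    if (existing) {
      // Append the streamed content fragment onto the accumulated message
      existing.message.content =
        (existing.message.content ?? "") + (choice.delta.content ?? "");
    } else {
      merged.choices.push({ index: choice.index, message: { ...choice.delta } });
    }
  }
  return merged;
}

// Usage: chunks.reduce<Merged | null>(mergeStreamedChunks, null)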

View File

@@ -3,16 +3,9 @@ from openai.openai_object import OpenAIObject
import time
import inspect
from openpipe.merge_openai_chunks import merge_openai_chunks
from openpipe.openpipe_meta import OpenPipeMeta
from openpipe.merge_openai_chunks import merge_streamed_chunks
from .shared import (
_should_check_cache,
maybe_check_cache,
maybe_check_cache_async,
report_async,
report,
)
from .shared import maybe_check_cache, maybe_check_cache_async, report_async, report
class WrappedChatCompletion(original_openai.ChatCompletion):
@@ -36,15 +29,9 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
def _gen():
assembled_completion = None
for chunk in chat_completion:
assembled_completion = merge_openai_chunks(
assembled_completion = merge_streamed_chunks(
assembled_completion, chunk
)
cache_status = (
"MISS" if _should_check_cache(openpipe_options) else "SKIP"
)
chunk.openpipe = OpenPipeMeta(cache_status=cache_status)
yield chunk
received_at = int(time.time() * 1000)
@@ -71,10 +58,6 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
status_code=200,
)
cache_status = (
"MISS" if _should_check_cache(openpipe_options) else "SKIP"
)
chat_completion["openpipe"] = OpenPipeMeta(cache_status=cache_status)
return chat_completion
except Exception as e:
received_at = int(time.time() * 1000)
@@ -113,28 +96,21 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
requested_at = int(time.time() * 1000)
try:
chat_completion = await original_openai.ChatCompletion.acreate(
*args, **kwargs
)
chat_completion = original_openai.ChatCompletion.acreate(*args, **kwargs)
if inspect.isasyncgen(chat_completion):
if inspect.isgenerator(chat_completion):
async def _gen():
def _gen():
assembled_completion = None
async for chunk in chat_completion:
assembled_completion = merge_openai_chunks(
for chunk in chat_completion:
assembled_completion = merge_streamed_chunks(
assembled_completion, chunk
)
cache_status = (
"MISS" if _should_check_cache(openpipe_options) else "SKIP"
)
chunk.openpipe = OpenPipeMeta(cache_status=cache_status)
yield chunk
received_at = int(time.time() * 1000)
await report_async(
report_async(
openpipe_options=openpipe_options,
requested_at=requested_at,
received_at=received_at,
@@ -147,7 +123,7 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
else:
received_at = int(time.time() * 1000)
await report_async(
report_async(
openpipe_options=openpipe_options,
requested_at=requested_at,
received_at=received_at,
@@ -156,17 +132,12 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
status_code=200,
)
cache_status = (
"MISS" if _should_check_cache(openpipe_options) else "SKIP"
)
chat_completion["openpipe"] = OpenPipeMeta(cache_status=cache_status)
return chat_completion
except Exception as e:
received_at = int(time.time() * 1000)
if isinstance(e, original_openai.OpenAIError):
await report_async(
report_async(
openpipe_options=openpipe_options,
requested_at=requested_at,
received_at=received_at,
@@ -176,7 +147,7 @@ class WrappedChatCompletion(original_openai.ChatCompletion):
status_code=e.http_status,
)
else:
await report_async(
report_async(
openpipe_options=openpipe_options,
requested_at=requested_at,
received_at=received_at,

View File

@@ -1,7 +0,0 @@
from attr import dataclass
@dataclass
class OpenPipeMeta:
# Cache status. One of 'HIT', 'MISS', 'SKIP'
cache_status: str

View File

@@ -1,10 +1,10 @@
from openpipe.api_client.api.default import (
report as api_report,
check_cache,
external_api_report,
external_api_check_cache,
)
from openpipe.api_client.client import AuthenticatedClient
from openpipe.api_client.models.report_json_body_tags import (
ReportJsonBodyTags,
from openpipe.api_client.models.external_api_report_json_body_tags import (
ExternalApiReportJsonBodyTags,
)
import toml
import time
@@ -21,7 +21,7 @@ def _get_tags(openpipe_options):
tags["$sdk"] = "python"
tags["$sdk_version"] = version
return ReportJsonBodyTags.from_dict(tags)
return ExternalApiReportJsonBodyTags.from_dict(tags)
def _should_check_cache(openpipe_options):
@@ -31,7 +31,7 @@ def _should_check_cache(openpipe_options):
def _process_cache_payload(
payload: check_cache.CheckCacheResponse200,
payload: external_api_check_cache.ExternalApiCheckCacheResponse200,
):
if not payload or not payload.resp_payload:
return None
@@ -47,9 +47,9 @@ def maybe_check_cache(
if not _should_check_cache(openpipe_options):
return None
try:
payload = check_cache.sync(
payload = external_api_check_cache.sync(
client=configured_client,
json_body=check_cache.CheckCacheJsonBody(
json_body=external_api_check_cache.ExternalApiCheckCacheJsonBody(
req_payload=req_payload,
requested_at=int(time.time() * 1000),
tags=_get_tags(openpipe_options),
@@ -72,9 +72,9 @@ async def maybe_check_cache_async(
return None
try:
payload = await check_cache.asyncio(
payload = await external_api_check_cache.asyncio(
client=configured_client,
json_body=check_cache.CheckCacheJsonBody(
json_body=external_api_check_cache.ExternalApiCheckCacheJsonBody(
req_payload=req_payload,
requested_at=int(time.time() * 1000),
tags=_get_tags(openpipe_options),
@@ -94,9 +94,9 @@ def report(
**kwargs,
):
try:
api_report.sync_detailed(
external_api_report.sync_detailed(
client=configured_client,
json_body=api_report.ReportJsonBody(
json_body=external_api_report.ExternalApiReportJsonBody(
**kwargs,
tags=_get_tags(openpipe_options),
),
@@ -112,9 +112,9 @@ async def report_async(
**kwargs,
):
try:
await api_report.asyncio_detailed(
await external_api_report.asyncio_detailed(
client=configured_client,
json_body=api_report.ReportJsonBody(
json_body=external_api_report.ExternalApiReportJsonBody(
**kwargs,
tags=_get_tags(openpipe_options),
),

View File

@@ -1,106 +1,55 @@
from functools import reduce
from dotenv import load_dotenv
from . import openai, configure_openpipe
import os
import pytest
from . import openai, configure_openpipe, configured_client
from .api_client.api.default import local_testing_only_get_latest_logged_call
from .merge_openai_chunks import merge_openai_chunks
import random
import string
def random_string(length):
letters = string.ascii_lowercase
return "".join(random.choice(letters) for i in range(length))
load_dotenv()
openai.api_key = os.getenv("OPENAI_API_KEY")
configure_openpipe(
base_url="http://localhost:3000/api/v1", api_key=os.getenv("OPENPIPE_API_KEY")
base_url="http://localhost:3000/api", api_key=os.getenv("OPENPIPE_API_KEY")
)
def last_logged_call():
return local_testing_only_get_latest_logged_call.sync(client=configured_client)
def test_sync():
completion = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[{"role": "system", "content": "count to 3"}],
messages=[{"role": "system", "content": "count to 10"}],
)
last_logged = last_logged_call()
assert (
last_logged.model_response.resp_payload["choices"][0]["message"]["content"]
== completion.choices[0].message.content
)
assert (
last_logged.model_response.req_payload["messages"][0]["content"] == "count to 3"
)
assert completion.openpipe.cache_status == "SKIP"
print(completion.choices[0].message.content)
def test_streaming():
completion = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[{"role": "system", "content": "count to 4"}],
messages=[{"role": "system", "content": "count to 10"}],
stream=True,
)
merged = reduce(merge_openai_chunks, completion, None)
last_logged = last_logged_call()
assert (
last_logged.model_response.resp_payload["choices"][0]["message"]["content"]
== merged["choices"][0]["message"]["content"]
)
for chunk in completion:
print(chunk)
async def test_async():
completion = await openai.ChatCompletion.acreate(
acompletion = await openai.ChatCompletion.acreate(
model="gpt-3.5-turbo",
messages=[{"role": "user", "content": "count down from 5"}],
)
last_logged = last_logged_call()
assert (
last_logged.model_response.resp_payload["choices"][0]["message"]["content"]
== completion.choices[0].message.content
)
assert (
last_logged.model_response.req_payload["messages"][0]["content"]
== "count down from 5"
)
assert completion.openpipe.cache_status == "SKIP"
print(acompletion.choices[0].message.content)
async def test_async_streaming():
completion = await openai.ChatCompletion.acreate(
acompletion = await openai.ChatCompletion.acreate(
model="gpt-3.5-turbo",
messages=[{"role": "user", "content": "count down from 5"}],
stream=True,
)
merged = None
async for chunk in completion:
assert chunk.openpipe.cache_status == "SKIP"
merged = merge_openai_chunks(merged, chunk)
last_logged = last_logged_call()
assert (
last_logged.model_response.resp_payload["choices"][0]["message"]["content"]
== merged["choices"][0]["message"]["content"]
)
assert (
last_logged.model_response.req_payload["messages"][0]["content"]
== "count down from 5"
)
assert merged["openpipe"].cache_status == "SKIP"
async for chunk in acompletion:
print(chunk)
def test_sync_with_tags():
@@ -109,54 +58,31 @@ def test_sync_with_tags():
messages=[{"role": "system", "content": "count to 10"}],
openpipe={"tags": {"promptId": "testprompt"}},
)
print("finished")
last_logged = last_logged_call()
assert (
last_logged.model_response.resp_payload["choices"][0]["message"]["content"]
== completion.choices[0].message.content
)
print(last_logged.tags)
assert last_logged.tags["promptId"] == "testprompt"
assert last_logged.tags["$sdk"] == "python"
print(completion.choices[0].message.content)
def test_bad_call():
try:
completion = openai.ChatCompletion.create(
model="gpt-3.5-turbo-blaster",
messages=[{"role": "system", "content": "count to 10"}],
stream=True,
)
assert False
except Exception as e:
pass
last_logged = last_logged_call()
print(last_logged)
assert (
last_logged.model_response.error_message
== "The model `gpt-3.5-turbo-blaster` does not exist"
completion = openai.ChatCompletion.create(
model="gpt-3.5-turbo-blaster",
messages=[{"role": "system", "content": "count to 10"}],
stream=True,
)
assert last_logged.model_response.status_code == 404
@pytest.mark.focus
async def test_caching():
messages = [{"role": "system", "content": f"repeat '{random_string(10)}'"}]
completion = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=messages,
messages=[{"role": "system", "content": "count to 10"}],
openpipe={"cache": True},
)
assert completion.openpipe.cache_status == "MISS"
first_logged = last_logged_call()
assert (
completion.choices[0].message.content
== first_logged.model_response.resp_payload["choices"][0]["message"]["content"]
)
completion2 = await openai.ChatCompletion.acreate(
model="gpt-3.5-turbo",
messages=[{"role": "system", "content": "count to 10"}],
openpipe={"cache": True},
)
assert completion2.openpipe.cache_status == "HIT"
print(completion2)

View File

@@ -4,7 +4,7 @@
* OpenPipe API
* The public API for reporting API calls to OpenPipe
*
* The version of the OpenAPI document: 0.1.1
* The version of the OpenAPI document: 0.1.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
@@ -26,193 +26,125 @@ import { BASE_PATH, COLLECTION_FORMATS, BaseAPI, RequiredError } from './base';
/**
*
* @export
* @interface CheckCache200Response
* @interface ExternalApiCheckCache200Response
*/
export interface CheckCache200Response {
export interface ExternalApiCheckCache200Response {
/**
* JSON-encoded response payload
* @type {any}
* @memberof CheckCache200Response
* @memberof ExternalApiCheckCache200Response
*/
'respPayload'?: any;
}
/**
*
* @export
* @interface CheckCacheDefaultResponse
* @interface ExternalApiCheckCacheDefaultResponse
*/
export interface CheckCacheDefaultResponse {
export interface ExternalApiCheckCacheDefaultResponse {
/**
*
* @type {string}
* @memberof CheckCacheDefaultResponse
* @memberof ExternalApiCheckCacheDefaultResponse
*/
'message': string;
/**
*
* @type {string}
* @memberof CheckCacheDefaultResponse
* @memberof ExternalApiCheckCacheDefaultResponse
*/
'code': string;
/**
*
* @type {Array<CheckCacheDefaultResponseIssuesInner>}
* @memberof CheckCacheDefaultResponse
* @type {Array<ExternalApiCheckCacheDefaultResponseIssuesInner>}
* @memberof ExternalApiCheckCacheDefaultResponse
*/
'issues'?: Array<CheckCacheDefaultResponseIssuesInner>;
'issues'?: Array<ExternalApiCheckCacheDefaultResponseIssuesInner>;
}
/**
*
* @export
* @interface CheckCacheDefaultResponseIssuesInner
* @interface ExternalApiCheckCacheDefaultResponseIssuesInner
*/
export interface CheckCacheDefaultResponseIssuesInner {
export interface ExternalApiCheckCacheDefaultResponseIssuesInner {
/**
*
* @type {string}
* @memberof CheckCacheDefaultResponseIssuesInner
* @memberof ExternalApiCheckCacheDefaultResponseIssuesInner
*/
'message': string;
}
/**
*
* @export
* @interface CheckCacheRequest
* @interface ExternalApiCheckCacheRequest
*/
export interface CheckCacheRequest {
export interface ExternalApiCheckCacheRequest {
/**
* Unix timestamp in milliseconds
* @type {number}
* @memberof CheckCacheRequest
* @memberof ExternalApiCheckCacheRequest
*/
'requestedAt': number;
/**
* JSON-encoded request payload
* @type {any}
* @memberof CheckCacheRequest
* @memberof ExternalApiCheckCacheRequest
*/
'reqPayload'?: any;
/**
* Extra tags to attach to the call for filtering. Eg { \"userId\": \"123\", \"promptId\": \"populate-title\" }
* @type {{ [key: string]: string; }}
* @memberof CheckCacheRequest
* @memberof ExternalApiCheckCacheRequest
*/
'tags'?: { [key: string]: string; };
}
/**
*
* @export
* @interface LocalTestingOnlyGetLatestLoggedCall200Response
* @interface ExternalApiReportRequest
*/
export interface LocalTestingOnlyGetLatestLoggedCall200Response {
/**
*
* @type {string}
* @memberof LocalTestingOnlyGetLatestLoggedCall200Response
*/
'createdAt': string;
/**
*
* @type {boolean}
* @memberof LocalTestingOnlyGetLatestLoggedCall200Response
*/
'cacheHit': boolean;
/**
*
* @type {{ [key: string]: string; }}
* @memberof LocalTestingOnlyGetLatestLoggedCall200Response
*/
'tags': { [key: string]: string; };
/**
*
* @type {LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse}
* @memberof LocalTestingOnlyGetLatestLoggedCall200Response
*/
'modelResponse': LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse | null;
}
/**
*
* @export
* @interface LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse
*/
export interface LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse {
/**
*
* @type {string}
* @memberof LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse
*/
'id': string;
/**
*
* @type {number}
* @memberof LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse
*/
'statusCode': number | null;
/**
*
* @type {string}
* @memberof LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse
*/
'errorMessage': string | null;
/**
*
* @type {any}
* @memberof LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse
*/
'reqPayload'?: any;
/**
*
* @type {any}
* @memberof LocalTestingOnlyGetLatestLoggedCall200ResponseModelResponse
*/
'respPayload'?: any;
}
/**
*
* @export
* @interface ReportRequest
*/
export interface ReportRequest {
export interface ExternalApiReportRequest {
/**
* Unix timestamp in milliseconds
* @type {number}
* @memberof ReportRequest
* @memberof ExternalApiReportRequest
*/
'requestedAt': number;
/**
* Unix timestamp in milliseconds
* @type {number}
* @memberof ReportRequest
* @memberof ExternalApiReportRequest
*/
'receivedAt': number;
/**
* JSON-encoded request payload
* @type {any}
* @memberof ReportRequest
* @memberof ExternalApiReportRequest
*/
'reqPayload'?: any;
/**
* JSON-encoded response payload
* @type {any}
* @memberof ReportRequest
* @memberof ExternalApiReportRequest
*/
'respPayload'?: any;
/**
* HTTP status code of response
* @type {number}
* @memberof ReportRequest
* @memberof ExternalApiReportRequest
*/
'statusCode'?: number;
/**
* User-friendly error message
* @type {string}
* @memberof ReportRequest
* @memberof ExternalApiReportRequest
*/
'errorMessage'?: string;
/**
* Extra tags to attach to the call for filtering. Eg { \"userId\": \"123\", \"promptId\": \"populate-title\" }
* @type {{ [key: string]: string; }}
* @memberof ReportRequest
* @memberof ExternalApiReportRequest
*/
'tags'?: { [key: string]: string; };
}
@@ -225,14 +157,14 @@ export const DefaultApiAxiosParamCreator = function (configuration?: Configurati
return {
/**
* Check if a prompt is cached
* @param {CheckCacheRequest} checkCacheRequest
* @param {ExternalApiCheckCacheRequest} externalApiCheckCacheRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
checkCache: async (checkCacheRequest: CheckCacheRequest, options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'checkCacheRequest' is not null or undefined
assertParamExists('checkCache', 'checkCacheRequest', checkCacheRequest)
const localVarPath = `/check-cache`;
externalApiCheckCache: async (externalApiCheckCacheRequest: ExternalApiCheckCacheRequest, options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'externalApiCheckCacheRequest' is not null or undefined
assertParamExists('externalApiCheckCache', 'externalApiCheckCacheRequest', externalApiCheckCacheRequest)
const localVarPath = `/v1/check-cache`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
@@ -255,40 +187,7 @@ export const DefaultApiAxiosParamCreator = function (configuration?: Configurati
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
localVarRequestOptions.data = serializeDataIfNeeded(checkCacheRequest, localVarRequestOptions, configuration)
return {
url: toPathString(localVarUrlObj),
options: localVarRequestOptions,
};
},
/**
* Get the latest logged call (only for local testing)
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
localTestingOnlyGetLatestLoggedCall: async (options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
const localVarPath = `/local-testing-only-get-latest-logged-call`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
if (configuration) {
baseOptions = configuration.baseOptions;
}
const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
const localVarHeaderParameter = {} as any;
const localVarQueryParameter = {} as any;
// authentication Authorization required
// http bearer authentication required
await setBearerAuthToObject(localVarHeaderParameter, configuration)
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
localVarRequestOptions.data = serializeDataIfNeeded(externalApiCheckCacheRequest, localVarRequestOptions, configuration)
return {
url: toPathString(localVarUrlObj),
@@ -297,14 +196,14 @@ export const DefaultApiAxiosParamCreator = function (configuration?: Configurati
},
/**
* Report an API call
* @param {ReportRequest} reportRequest
* @param {ExternalApiReportRequest} externalApiReportRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
report: async (reportRequest: ReportRequest, options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'reportRequest' is not null or undefined
assertParamExists('report', 'reportRequest', reportRequest)
const localVarPath = `/report`;
externalApiReport: async (externalApiReportRequest: ExternalApiReportRequest, options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'externalApiReportRequest' is not null or undefined
assertParamExists('externalApiReport', 'externalApiReportRequest', externalApiReportRequest)
const localVarPath = `/v1/report`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
@@ -327,7 +226,7 @@ export const DefaultApiAxiosParamCreator = function (configuration?: Configurati
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
localVarRequestOptions.data = serializeDataIfNeeded(reportRequest, localVarRequestOptions, configuration)
localVarRequestOptions.data = serializeDataIfNeeded(externalApiReportRequest, localVarRequestOptions, configuration)
return {
url: toPathString(localVarUrlObj),
@@ -346,31 +245,22 @@ export const DefaultApiFp = function(configuration?: Configuration) {
return {
/**
* Check if a prompt is cached
* @param {CheckCacheRequest} checkCacheRequest
* @param {ExternalApiCheckCacheRequest} externalApiCheckCacheRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async checkCache(checkCacheRequest: CheckCacheRequest, options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<CheckCache200Response>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.checkCache(checkCacheRequest, options);
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
},
/**
* Get the latest logged call (only for local testing)
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async localTestingOnlyGetLatestLoggedCall(options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<LocalTestingOnlyGetLatestLoggedCall200Response>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.localTestingOnlyGetLatestLoggedCall(options);
async externalApiCheckCache(externalApiCheckCacheRequest: ExternalApiCheckCacheRequest, options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<ExternalApiCheckCache200Response>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.externalApiCheckCache(externalApiCheckCacheRequest, options);
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
},
/**
* Report an API call
* @param {ReportRequest} reportRequest
* @param {ExternalApiReportRequest} externalApiReportRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async report(reportRequest: ReportRequest, options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.report(reportRequest, options);
async externalApiReport(externalApiReportRequest: ExternalApiReportRequest, options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.externalApiReport(externalApiReportRequest, options);
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
},
}
@@ -385,29 +275,21 @@ export const DefaultApiFactory = function (configuration?: Configuration, basePa
return {
/**
* Check if a prompt is cached
* @param {CheckCacheRequest} checkCacheRequest
* @param {ExternalApiCheckCacheRequest} externalApiCheckCacheRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
checkCache(checkCacheRequest: CheckCacheRequest, options?: any): AxiosPromise<CheckCache200Response> {
return localVarFp.checkCache(checkCacheRequest, options).then((request) => request(axios, basePath));
},
/**
* Get the latest logged call (only for local testing)
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
localTestingOnlyGetLatestLoggedCall(options?: any): AxiosPromise<LocalTestingOnlyGetLatestLoggedCall200Response> {
return localVarFp.localTestingOnlyGetLatestLoggedCall(options).then((request) => request(axios, basePath));
externalApiCheckCache(externalApiCheckCacheRequest: ExternalApiCheckCacheRequest, options?: any): AxiosPromise<ExternalApiCheckCache200Response> {
return localVarFp.externalApiCheckCache(externalApiCheckCacheRequest, options).then((request) => request(axios, basePath));
},
/**
* Report an API call
* @param {ReportRequest} reportRequest
* @param {ExternalApiReportRequest} externalApiReportRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
report(reportRequest: ReportRequest, options?: any): AxiosPromise<any> {
return localVarFp.report(reportRequest, options).then((request) => request(axios, basePath));
externalApiReport(externalApiReportRequest: ExternalApiReportRequest, options?: any): AxiosPromise<any> {
return localVarFp.externalApiReport(externalApiReportRequest, options).then((request) => request(axios, basePath));
},
};
};
@@ -421,34 +303,24 @@ export const DefaultApiFactory = function (configuration?: Configuration, basePa
export class DefaultApi extends BaseAPI {
/**
* Check if a prompt is cached
* @param {CheckCacheRequest} checkCacheRequest
* @param {ExternalApiCheckCacheRequest} externalApiCheckCacheRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof DefaultApi
*/
public checkCache(checkCacheRequest: CheckCacheRequest, options?: AxiosRequestConfig) {
return DefaultApiFp(this.configuration).checkCache(checkCacheRequest, options).then((request) => request(this.axios, this.basePath));
}
/**
* Get the latest logged call (only for local testing)
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof DefaultApi
*/
public localTestingOnlyGetLatestLoggedCall(options?: AxiosRequestConfig) {
return DefaultApiFp(this.configuration).localTestingOnlyGetLatestLoggedCall(options).then((request) => request(this.axios, this.basePath));
public externalApiCheckCache(externalApiCheckCacheRequest: ExternalApiCheckCacheRequest, options?: AxiosRequestConfig) {
return DefaultApiFp(this.configuration).externalApiCheckCache(externalApiCheckCacheRequest, options).then((request) => request(this.axios, this.basePath));
}
/**
* Report an API call
* @param {ReportRequest} reportRequest
* @param {ExternalApiReportRequest} externalApiReportRequest
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof DefaultApi
*/
public report(reportRequest: ReportRequest, options?: AxiosRequestConfig) {
return DefaultApiFp(this.configuration).report(reportRequest, options).then((request) => request(this.axios, this.basePath));
public externalApiReport(externalApiReportRequest: ExternalApiReportRequest, options?: AxiosRequestConfig) {
return DefaultApiFp(this.configuration).externalApiReport(externalApiReportRequest, options).then((request) => request(this.axios, this.basePath));
}
}
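
Taken together, the renamed surface above — DefaultApi, externalApiCheckCache, externalApiReport, and the ExternalApi* request interfaces — can be exercised roughly as in the following sketch. This is illustrative only and not part of the diff: the import path and the Configuration options are assumptions, while the class, method, and field names come from the generated code shown above.

import { Configuration, DefaultApi, ExternalApiReportRequest } from "./api-client"; // hypothetical import path

async function reportExample() {
  // The generated operations use bearer auth (setBearerAuthToObject above), so an
  // access token is assumed to be the relevant Configuration option.
  const api = new DefaultApi(new Configuration({ accessToken: process.env.OPENPIPE_API_KEY }));

  const body: ExternalApiReportRequest = {
    requestedAt: Date.now() - 1200, // Unix timestamp in milliseconds
    receivedAt: Date.now(),
    reqPayload: { model: "gpt-3.5-turbo", messages: [{ role: "system", content: "count to 10" }] },
    respPayload: { choices: [{ message: { role: "assistant", content: "1 2 3 ..." } }] },
    statusCode: 200,
    tags: { userId: "123", promptId: "populate-title" },
  };

  // Posts to `${BASE_PATH}/v1/report`, since the /v1 prefix now lives in the operation paths.
  await api.externalApiReport(body);

  // Cache lookups go through the same client; only requestedAt is required on the request.
  const cached = await api.externalApiCheckCache({
    requestedAt: Date.now(),
    reqPayload: { model: "gpt-3.5-turbo", messages: [{ role: "system", content: "count to 10" }] },
  });
  console.log(cached.data.respPayload); // undefined when nothing is cached
}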

View File

@@ -4,7 +4,7 @@
* OpenPipe API
* The public API for reporting API calls to OpenPipe
*
* The version of the OpenAPI document: 0.1.1
* The version of the OpenAPI document: 0.1.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
@@ -19,7 +19,7 @@ import type { Configuration } from './configuration';
import type { AxiosPromise, AxiosInstance, AxiosRequestConfig } from 'axios';
import globalAxios from 'axios';
export const BASE_PATH = "https://app.openpipe.ai/api/v1".replace(/\/+$/, "");
export const BASE_PATH = "https://app.openpipe.ai/api".replace(/\/+$/, "");
/**
*
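
With BASE_PATH now set to https://app.openpipe.ai/api and the /v1 prefix moved into the individual operation paths, pointing the client at a self-hosted instance is a matter of overriding the base path at construction time. A minimal sketch, assuming the generated Configuration class exposes the usual basePath option (the import path is hypothetical):

import { Configuration, DefaultApi } from "./api-client"; // hypothetical import path

// Mirror the local base URL used by the Python tests above
// (http://localhost:3000/api) instead of the default BASE_PATH.
const localApi = new DefaultApi(
  new Configuration({
    basePath: "http://localhost:3000/api",
    accessToken: process.env.OPENPIPE_API_KEY,
  })
);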

View File

@@ -4,7 +4,7 @@
* OpenPipe API
* The public API for reporting API calls to OpenPipe
*
* The version of the OpenAPI document: 0.1.1
* The version of the OpenAPI document: 0.1.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -4,7 +4,7 @@
* OpenPipe API
* The public API for reporting API calls to OpenPipe
*
* The version of the OpenAPI document: 0.1.1
* The version of the OpenAPI document: 0.1.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -4,7 +4,7 @@
* OpenPipe API
* The public API for reporting API calls to OpenPipe
*
* The version of the OpenAPI document: 0.1.1
* The version of the OpenAPI document: 0.1.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -7,8 +7,9 @@ databases:
services:
- type: web
name: querykey-prod-web
rootDir: app
env: docker
dockerfilePath: ./app/Dockerfile
dockerfilePath: Dockerfile
dockerContext: .
plan: standard
domains:
@@ -31,8 +32,9 @@ services:
- type: web
name: querykey-prod-wss
rootDir: app
env: docker
dockerfilePath: ./app/Dockerfile
dockerfilePath: Dockerfile
dockerContext: .
plan: free
dockerCommand: pnpm tsx src/wss-server.ts