Add smoketest (broken)

David Corbitt
2023-07-30 14:34:37 -07:00
parent 0a0c5c5dda
commit 11985a0dcc
9 changed files with 64 additions and 23 deletions

View File

@@ -12,6 +12,7 @@ declare module "nextjs-routes" {
   export type Route =
     | StaticRoute<"/account/signin">
+    | DynamicRoute<"/api/[...trpc]", { "trpc": string[] }>
     | DynamicRoute<"/api/auth/[...nextauth]", { "nextauth": string[] }>
     | StaticRoute<"/api/openapi">
     | DynamicRoute<"/api/trpc/[trpc]", { "trpc": string }>

View File

@@ -61,6 +61,7 @@
     "next": "^13.4.2",
     "next-auth": "^4.22.1",
     "next-query-params": "^4.2.3",
+    "nextjs-cors": "^2.1.2",
     "nextjs-routes": "^2.0.1",
     "openai": "4.0.0-beta.2",
     "pluralize": "^8.0.0",

app/pnpm-lock.yaml (generated)
View File

@@ -125,6 +125,9 @@ dependencies:
   next-query-params:
     specifier: ^4.2.3
     version: 4.2.3(next@13.4.2)(react@18.2.0)(use-query-params@2.2.1)
+  nextjs-cors:
+    specifier: ^2.1.2
+    version: 2.1.2(next@13.4.2)
   nextjs-routes:
     specifier: ^2.0.1
     version: 2.0.1(next@13.4.2)
@@ -6184,6 +6187,15 @@ packages:
       - babel-plugin-macros
     dev: false
 
+  /nextjs-cors@2.1.2(next@13.4.2):
+    resolution: {integrity: sha512-2yOVivaaf2ILe4f/qY32hnj3oC77VCOsUQJQfhVMGsXE/YMEWUY2zy78sH9FKUCM7eG42/l3pDofIzMD781XGA==}
+    peerDependencies:
+      next: ^8.1.1-canary.54 || ^9.0.0 || ^10.0.0-0 || ^11.0.0 || ^12.0.0 || ^13.0.0
+    dependencies:
+      cors: 2.8.5
+      next: 13.4.2(@babel/core@7.22.9)(react-dom@18.2.0)(react@18.2.0)
+    dev: false
+
   /nextjs-routes@2.0.1(next@13.4.2):
     resolution: {integrity: sha512-pBGRm6uR44zwUjWWYn6+gwz08BhBbqUYlIzsbNHAh1TWohHYKWFaa2YVsj8BxEo726MZYg87OJPnHpaaY1ia0w==}
     hasBin: true

View File

@@ -0,0 +1,22 @@
+import { type NextApiRequest, type NextApiResponse } from "next";
+import cors from "nextjs-cors";
+import { createOpenApiNextHandler } from "trpc-openapi";
+import { createProcedureCache } from "trpc-openapi/dist/adapters/node-http/procedures";
+
+import { appRouter } from "~/server/api/root.router";
+import { createTRPCContext } from "~/server/api/trpc";
+
+const openApiHandler = createOpenApiNextHandler({
+  router: appRouter,
+  createContext: createTRPCContext,
+});
+
+const cache = createProcedureCache(appRouter);
+
+const handler = async (req: NextApiRequest, res: NextApiResponse) => {
+  // Setup CORS
+  await cors(req, res);
+  return openApiHandler(req, res);
+};
+
+export default handler;
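The new handler above runs nextjs-cors before delegating to the trpc-openapi adapter, so the REST endpoints generated from the tRPC router become callable from other origins. A minimal sketch of a cross-origin caller, assuming this file is the /api/[...trpc] catch-all implied by the generated route types; the procedure path below is a placeholder, not a route added in this commit:

// Sketch only: "/api/v1/example" is a hypothetical path for illustration.
// Because the handler awaits cors(req, res) first, the response carries the
// Access-Control-Allow-* headers and a browser on another origin will accept it.
const res = await fetch("http://localhost:3000/api/v1/example", {
  method: "GET",
  headers: { "Content-Type": "application/json" },
});
console.log(res.status, await res.json());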

View File

@@ -11,7 +11,7 @@ import { initTRPC, TRPCError } from "@trpc/server";
 import { type CreateNextContextOptions } from "@trpc/server/adapters/next";
 import { type Session } from "next-auth";
 import superjson from "superjson";
-import { OpenApiMeta } from "trpc-openapi";
+import { type OpenApiMeta } from "trpc-openapi";
 import { ZodError } from "zod";
 import { getServerAuthSession } from "~/server/auth";
 import { prisma } from "~/server/db";

View File

@@ -36,10 +36,7 @@ export class OpenPipeApi {
     modelProvider: T,
     promptFunction: () => PromptTypes[T],
     scenarioVariables: Record<string, unknown>
-  ): Promise<{
-    id?: string;
-    prompt: PromptTypes[T];
-  }> {
+  ) {
     const prompt = promptFunction() as PromptTypes[T];
     if (!prompt) {
       console.error("Prompt function returned null", promptFunction.toString());
@@ -65,11 +62,14 @@ export class OpenPipeApi {
     return {
       id: resp?.data?.loggedCallId,
-      prompt,
-    }
+      prompt: prompt!,
+    };
   }
 
-  public async captureResponse(loggedCallId: string | undefined, responsePayload: any): Promise<void> {
+  public async captureResponse(
+    loggedCallId: string | undefined,
+    responsePayload: any
+  ): Promise<void> {
     if (!loggedCallId) {
       console.error("No call log ID provided to captureResponse");
       return;
@@ -84,6 +84,5 @@ export class OpenPipeApi {
     } catch (err) {
       console.error("Error reporting to OpenPipe", err);
     }
-
   }
 }
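With the signature changes above, capturePrompt now resolves to an object carrying the logged call id and the built prompt, and captureResponse takes that id plus the raw response payload. A sketch of the intended round trip, assuming an OpenAI v3 client named openai; the scenario id and prompt body are placeholders, not part of this diff:

// Sketch only: scenario id, prompt body, and the openai client are assumptions.
const captured = await client.capturePrompt(
  "example-scenario",       // logged-call / scenario identifier (placeholder)
  "openai/ChatCompletion",  // model provider key
  () => ({
    model: "gpt-3.5-turbo",
    messages: [{ role: "system", content: "count to 3 in french" }],
  }),
  {}                        // scenario variables
);
const response = await openai.createChatCompletion(captured.prompt);
await client.captureResponse(captured.id, response.data);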

View File

@@ -4,7 +4,7 @@
   "description": "",
   "main": "index.js",
   "scripts": {
-    "test": "echo \"Error: no test specified\" && exit 1"
+    "smoketest": "tsx ./tests/smoketest.ts"
   },
   "keywords": [],
   "author": "",
@@ -14,6 +14,7 @@
     "openai": "^3.3.0"
   },
   "devDependencies": {
+    "@types/node": "^20.4.5",
     "dotenv": "^16.3.1",
     "tsx": "^3.12.7",
     "typescript": "^5.1.6"

View File

@@ -13,6 +13,9 @@ dependencies:
     version: 3.3.0
 
 devDependencies:
+  '@types/node':
+    specifier: ^20.4.5
+    version: 20.4.5
   dotenv:
     specifier: ^16.3.1
     version: 16.3.1
@@ -244,6 +247,10 @@ packages:
     dev: true
     optional: true
 
+  /@types/node@20.4.5:
+    resolution: {integrity: sha512-rt40Nk13II9JwQBdeYqmbn2Q6IVTA5uPhvSO+JVqdXw/6/4glI6oR9ezty/A9Hg5u7JH4OmYmuQ+XvjKm0Datg==}
+    dev: true
+
   /asynckit@0.4.0:
     resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
     dev: false

View File

@@ -1,7 +1,13 @@
 import "dotenv/config";
-import { Configuration, OpenPipeApi } from "../index";
+import { Configuration, OpenAIApi } from "openai";
+import { Configuration as OPConfiguration, OpenPipeApi } from "..";
 
-const config = new Configuration({
+export const openAIConfig = new Configuration({ apiKey: process.env.OPENAI_API_KEY });
+const openai = new OpenAIApi(openAIConfig);
+
+const config = new OPConfiguration({
   opOptions: {
     apiKey: "mock-key",
     basePath: "http://localhost:3000/api",
@@ -11,7 +17,7 @@ const config = new Configuration({
 const client = new OpenPipeApi(config);
 
 async function main() {
-  const prompt = client.capturePrompt(
+  const prompt = await client.capturePrompt(
     "123",
     "openai/ChatCompletion",
     () => ({
@@ -26,17 +32,9 @@ async function main() {
     {}
   );
 
-  const response = await client.createChatCompletion({
-    model: "gpt-3.5-turbo",
-    messages: [
-      {
-        role: "system",
-        content: "count to 3 in french",
-      },
-    ],
-  });
+  const response = await openai.createChatCompletion(prompt.prompt);
 
-  console.log("got response", response.data);
+  console.log("got response", response.data.choices[0].message);
 }
 
 main().catch((err) => {