TypeScript SDK mostly working
Ok, so this is still pretty rough; notably, there's no reporting for streaming yet. But for non-streaming requests, I've verified that this does in fact report requests locally.
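For context, a rough sketch of how the wrapped client is meant to be used once this lands. It mirrors the server config change further down in this diff (the openpipe block with an API key and a local baseUrl); the exact export shape of openpipe/src/openai and the env var names here are assumptions, not confirmed by the diff.

import OpenAI from "openpipe/src/openai";

// Drop-in replacement for the stock OpenAI client. The extra `openpipe`
// block is what enables request reporting; per the commit message, only
// non-streaming requests are reported so far.
const client = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  openpipe: {
    apiKey: process.env.OPENPIPE_API_KEY, // assumed env var, matching the app config below
    baseUrl: "http://localhost:3000/api/v1",
  },
});

// A non-streaming call like this should show up as a logged call locally.
const completion = await client.chat.completions.create({
  model: "gpt-3.5-turbo",
  messages: [{ role: "user", content: "Hello!" }],
});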
@@ -72,6 +72,7 @@
    "nextjs-cors": "^2.1.2",
    "nextjs-routes": "^2.0.1",
    "openai": "4.0.0-beta.7",
    "openpipe": "workspace:*",
    "pg": "^8.11.2",
    "pluralize": "^8.0.0",
    "posthog-js": "^1.75.3",

@@ -100,8 +101,7 @@
    "uuid": "^9.0.0",
    "vite-tsconfig-paths": "^4.2.0",
    "zod": "^3.21.4",
    "zustand": "^4.3.9",
    "openpipe": "workspace:*"
    "zustand": "^4.3.9"
  },
  "devDependencies": {
    "@openapi-contrib/openapi-schema-to-json-schema": "^4.0.5",

@@ -129,6 +129,7 @@
    "eslint-plugin-unused-imports": "^2.0.0",
    "monaco-editor": "^0.40.0",
    "openapi-typescript": "^6.3.4",
    "openapi-typescript-codegen": "^0.25.0",
    "prisma": "^4.14.0",
    "raw-loader": "^4.0.2",
    "typescript": "^5.0.4",

@@ -1,54 +1,10 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
import {
  type ChatCompletionChunk,
  type ChatCompletion,
  type CompletionCreateParams,
} from "openai/resources/chat";
import { type CompletionResponse } from "../types";
import { isArray, isString, omit } from "lodash-es";
import { openai } from "~/server/utils/openai";
import { isArray, isString } from "lodash-es";
import { APIError } from "openai";

const mergeStreamedChunks = (
  base: ChatCompletion | null,
  chunk: ChatCompletionChunk,
): ChatCompletion => {
  if (base === null) {
    return mergeStreamedChunks({ ...chunk, choices: [] }, chunk);
  }

  const choices = [...base.choices];
  for (const choice of chunk.choices) {
    const baseChoice = choices.find((c) => c.index === choice.index);
    if (baseChoice) {
      baseChoice.finish_reason = choice.finish_reason ?? baseChoice.finish_reason;
      baseChoice.message = baseChoice.message ?? { role: "assistant" };

      if (choice.delta?.content)
        baseChoice.message.content =
          ((baseChoice.message.content as string) ?? "") + (choice.delta.content ?? "");
      if (choice.delta?.function_call) {
        const fnCall = baseChoice.message.function_call ?? {};
        fnCall.name =
          ((fnCall.name as string) ?? "") + ((choice.delta.function_call.name as string) ?? "");
        fnCall.arguments =
          ((fnCall.arguments as string) ?? "") +
          ((choice.delta.function_call.arguments as string) ?? "");
      }
    } else {
      // @ts-expect-error the types are correctly telling us that finish_reason
      // could be null, but don't want to fix it right now.
      choices.push({ ...omit(choice, "delta"), message: { role: "assistant", ...choice.delta } });
    }
  }

  const merged: ChatCompletion = {
    ...base,
    choices,
  };

  return merged;
};
import { type ChatCompletion, type CompletionCreateParams } from "openai/resources/chat";
import mergeChunks from "openpipe/src/openai/mergeChunks";
import { openai } from "~/server/utils/openai";
import { type CompletionResponse } from "../types";

export async function getCompletion(
  input: CompletionCreateParams,

@@ -59,7 +15,6 @@ export async function getCompletion(

  try {
    if (onStream) {
      console.log("got started");
      const resp = await openai.chat.completions.create(
        { ...input, stream: true },
        {

@@ -67,11 +22,9 @@ export async function getCompletion(
        },
      );
      for await (const part of resp) {
        console.log("got part", part);
        finalCompletion = mergeStreamedChunks(finalCompletion, part);
        finalCompletion = mergeChunks(finalCompletion, part);
        onStream(finalCompletion);
      }
      console.log("got final", finalCompletion);
      if (!finalCompletion) {
        return {
          type: "error",

app/src/server/api/external/v1Api.router.ts
@@ -107,7 +107,7 @@ export const v1ApiRouter = createOpenApiRouter({
        .default({}),
    }),
  )
  .output(z.void())
  .output(z.object({ status: z.literal("ok") }))
  .mutation(async ({ input, ctx }) => {
    const reqPayload = await reqValidator.spa(input.reqPayload);
    const respPayload = await respValidator.spa(input.respPayload);

@@ -166,6 +166,7 @@ export const v1ApiRouter = createOpenApiRouter({
    ]);

    await createTags(newLoggedCallId, input.tags);
    return { status: "ok" };
  }),
  localTestingOnlyGetLatestLoggedCall: openApiProtectedProc
    .meta({

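Since the mutation now returns { status: "ok" } instead of void, generated clients get a concrete JSON body to deserialize. A hypothetical sketch of what a call against the local API might look like; the route path and auth header are assumptions, and only reqPayload, respPayload, tags, and the response shape appear in this diff.

// Hypothetical: reporting a logged call over HTTP against the local server.
// Assumes the router is reachable under the baseUrl used elsewhere in this
// commit (http://localhost:3000/api/v1) at a /report route.
const res = await fetch("http://localhost:3000/api/v1/report", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${process.env.OPENPIPE_API_KEY}`, // assumed auth scheme
  },
  body: JSON.stringify({
    reqPayload: { model: "gpt-3.5-turbo", messages: [] }, // the original request
    respPayload: { choices: [] }, // the completion that came back
    tags: { source: "local-testing" }, // arbitrary string tags
  }),
});
const body = (await res.json()) as { status: "ok" };
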
@@ -3,6 +3,7 @@ import { openApiDocument } from "~/pages/api/v1/openapi.json";
import fs from "fs";
import path from "path";
import { execSync } from "child_process";
import { generate } from "openapi-typescript-codegen";

const scriptPath = import.meta.url.replace("file://", "");
const clientLibsPath = path.join(path.dirname(scriptPath), "../../../../client-libs");

@@ -18,13 +19,20 @@ console.log("Generating TypeScript client");
const tsClientPath = path.join(clientLibsPath, "typescript/src/codegen");

fs.rmSync(tsClientPath, { recursive: true, force: true });
fs.mkdirSync(tsClientPath, { recursive: true });

execSync(
  `pnpm dlx @openapitools/openapi-generator-cli generate -i "${schemaPath}" -g typescript-axios -o "${tsClientPath}"`,
  {
    stdio: "inherit",
  },
);
await generate({
  input: openApiDocument,
  output: tsClientPath,
  clientName: "OPClient",
  httpClient: "node",
});
// execSync(
//   `pnpm run openapi generate --input "${schemaPath}" --output "${tsClientPath}" --name OPClient --client node`,
//   {
//     stdio: "inherit",
//   },
// );

console.log("Generating Python client");

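The switch from shelling out to openapi-generator-cli to the programmatic generate() call emits a node client under client-libs/typescript/src/codegen with the class name OPClient. A minimal sketch of how the SDK might consume it, assuming openapi-typescript-codegen's usual config object (the BASE/TOKEN option names and the import path are assumptions, not shown in this diff):

// Sketch: instantiating the generated client from the SDK side.
import { OPClient } from "./codegen"; // illustrative path to the generated code

const opClient = new OPClient({
  BASE: "http://localhost:3000/api/v1", // assumed to match the server's baseUrl
  TOKEN: process.env.OPENPIPE_API_KEY,
});
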
@@ -1,7 +1,6 @@
import { type ClientOptions } from "openai";
import fs from "fs";
import path from "path";
import OpenAI from "openpipe/src/openai";
import OpenAI, { type ClientOptions } from "openpipe/src/openai";

import { env } from "~/env.mjs";

@@ -16,7 +15,13 @@ try {
  config = JSON.parse(jsonData.toString());
} catch (error) {
  // Set a dummy key so it doesn't fail at build time
  config = { apiKey: env.OPENAI_API_KEY ?? "dummy-key" };
  config = {
    apiKey: env.OPENAI_API_KEY ?? "dummy-key",
    openpipe: {
      apiKey: env.OPENPIPE_API_KEY,
      baseUrl: "http://localhost:3000/api/v1",
    },
  };
}

// export const openai = env.OPENPIPE_API_KEY ? new OpenAI.OpenAI(config) : new OriginalOpenAI(config);