Publish the ingestion library to NPM (#204)

* Update client-libs TypeScript README

* Create index.d.ts files

* Publish the ingestion library to NPM

The library is now published at https://www.npmjs.com/package/openpipe; see the README for details, and the usage sketch after the commit metadata below.

* Rename package.json in /dist folder

* Increment patch version

* Increment package version

* Add newline to publish.sh

---------

Co-authored-by: David Corbitt <davidlcorbitt@gmail.com>
Kyle Corbitt committed 2023-08-29 12:18:57 -07:00 (committed by GitHub)
commit 1684663ddc, parent 70fae68225
27 changed files with 79 additions and 225 deletions
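For context, a hedged sketch of consuming the published package. The entry point re-exports a single `openai` namespace (see the index.ts diff below); the call and response shapes here are inferred from this commit's test diff, not from documented types, and the package README remains the authority.

// npm install openpipe
import { openai } from "openpipe"; // consolidated entry point (index.ts diff below)

// Response shape inferred from the test diff in this commit, not a published type.
type OpenPipeCompletion = {
  choices: { message: { content: string | null } }[];
  openpipe?: { reportingFinished: Promise<unknown> };
};

async function demo(create: (req: object) => Promise<OpenPipeCompletion>) {
  const completion = await create({
    messages: [{ role: "system", content: "count to 10" }], // prompt taken from the tests
  });
  console.log(completion.choices[0]?.message.content);
  // Let OpenPipe finish logging the call before the process exits.
  await completion.openpipe?.reportingFinished;
}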

client-libs/typescript/build.sh Executable file

@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+# Adapted from https://github.com/openai/openai-node/blob/master/build
+
+set -exuo pipefail
+
+rm -rf dist /tmp/openpipe-build-dist
+mkdir /tmp/openpipe-build-dist
+cp -rp * /tmp/openpipe-build-dist
+
+# Rename package name in package.json
+python3 -c "
+import json
+
+with open('/tmp/openpipe-build-dist/package.json', 'r') as f:
+    data = json.load(f)
+
+data['name'] = 'openpipe'
+
+with open('/tmp/openpipe-build-dist/package.json', 'w') as f:
+    json.dump(data, f, indent=4)
+"
+rm -rf /tmp/openpipe-build-dist/node_modules
+mv /tmp/openpipe-build-dist dist
+
+# build to .js files
+(cd dist && npm exec tsc -- --noEmit false)
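The inline python3 above rewrites the package name from "openpipe-dev" to "openpipe" in the staged copy before compiling. If dropping the Python dependency were ever desired, the same step could be a small Node script; a sketch (hypothetical rename-package.ts, not part of this commit):

// Hypothetical rename-package.ts; mirrors the inline Python step above.
import { readFileSync, writeFileSync } from "node:fs";

const path = "/tmp/openpipe-build-dist/package.json";
const pkg = JSON.parse(readFileSync(path, "utf8"));
pkg.name = "openpipe"; // dev name is "openpipe-dev"; publish under "openpipe"
writeFileSync(path, JSON.stringify(pkg, null, 4) + "\n"); // indent=4, like the Python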

client-libs/typescript/index.ts

@@ -1,3 +1 @@
-// main.ts or index.ts at the root level
-export * as OpenAI from "./src/openai";
-export * as OpenAILegacy from "./src/openai-legacy";
+export * as openai from "./openai";


@@ -80,6 +80,7 @@ test("bad call streaming", async () => {
       stream: true,
     });
   } catch (e) {
+    // @ts-expect-error need to check for error type
     await e.openpipe.reportingFinished;
     const lastLogged = await lastLoggedCall();
     expect(lastLogged?.modelResponse?.errorMessage).toEqual(
@@ -96,7 +97,9 @@ test("bad call", async () => {
       messages: [{ role: "system", content: "count to 10" }],
     });
   } catch (e) {
+    // @ts-expect-error need to check for error type
     assert("openpipe" in e);
+    // @ts-expect-error need to check for error type
     await e.openpipe.reportingFinished;
     const lastLogged = await lastLoggedCall();
     expect(lastLogged?.modelResponse?.errorMessage).toEqual(
@@ -120,7 +123,8 @@ test("caching", async () => {
   await completion.openpipe.reportingFinished;
   const firstLogged = await lastLoggedCall();
-  expect(completion.choices[0].message.content).toEqual(
+  expect(completion.choices[0]?.message.content).toEqual(
     firstLogged?.modelResponse?.respPayload.choices[0].message.content,
   );
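The @ts-expect-error suppressions are needed because catch variables are typed unknown under strict TypeScript. A sketch of the narrowing alternative; the openpipe property shape is assumed from these tests, not from a published type:

// A user-defined type guard avoids the suppressions by narrowing `unknown`.
function hasOpenpipe(e: unknown): e is { openpipe: { reportingFinished: Promise<unknown> } } {
  return typeof e === "object" && e !== null && "openpipe" in e;
}

async function awaitReporting(e: unknown): Promise<void> {
  if (hasOpenpipe(e)) {
    await e.openpipe.reportingFinished; // no @ts-expect-error needed after narrowing
  }
}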

client-libs/typescript/package.json

@@ -1,14 +1,17 @@
 {
-  "name": "openpipe",
-  "version": "0.1.0",
+  "name": "openpipe-dev",
+  "version": "0.3.3",
   "type": "module",
   "description": "Metrics and auto-evaluation for LLM calls",
   "scripts": {
-    "build": "tsc",
+    "build": "./build.sh",
     "test": "vitest"
   },
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
+  "main": "./index.ts",
+  "publishConfig": {
+    "access": "public",
+    "main": "./index.js"
+  },
   "keywords": [],
   "author": "",
   "license": "Apache-2.0",

client-libs/typescript/publish.sh

@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+# Adapted from https://github.com/openai/openai-node/blob/master/build
+
+set -exuo pipefail
+
+./build.sh
+
+(cd dist && pnpm publish --access public)


@@ -1,4 +1,5 @@
import pkg from "../package.json";
import pkg from "./package.json";
import { DefaultService } from "./codegen";
export type OpenPipeConfig = {
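The import moving from ../package.json to ./package.json matches the flattened file layout (rootDir "." in the tsconfig diff below), so the manifest stays inside the compiled tree. A sketch of the pattern; it assumes "resolveJsonModule" is enabled, which these hunks don't show:

// Assumes "resolveJsonModule": true in tsconfig.json (not visible in this diff).
// Importing the manifest lets the client report its own version at runtime.
import pkg from "./package.json";

export const clientVersion: string = pkg.version;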


@@ -1,85 +0,0 @@
-import * as openPipeClient from "../codegen";
-import * as openai from "openai-legacy";
-import { version } from "../../package.json";
-
-// Anything we don't override we want to pass through to openai directly
-export * as openAILegacy from "openai-legacy";
-
-type OPConfigurationParameters = {
-  apiKey?: string;
-  basePath?: string;
-};
-
-export class Configuration extends openai.Configuration {
-  public qkConfig?: openPipeClient.Configuration;
-
-  constructor(
-    config: openai.ConfigurationParameters & {
-      opParameters?: OPConfigurationParameters;
-    }
-  ) {
-    super(config);
-    if (config.opParameters) {
-      this.qkConfig = new openPipeClient.Configuration(config.opParameters);
-    }
-  }
-}
-
-type CreateChatCompletion = InstanceType<typeof openai.OpenAIApi>["createChatCompletion"];
-
-export class OpenAIApi extends openai.OpenAIApi {
-  public openPipeApi?: openPipeClient.DefaultApi;
-
-  constructor(config: Configuration) {
-    super(config);
-    if (config.qkConfig) {
-      this.openPipeApi = new openPipeClient.DefaultApi(config.qkConfig);
-    }
-  }
-
-  public async createChatCompletion(
-    createChatCompletionRequest: Parameters<CreateChatCompletion>[0],
-    options?: Parameters<CreateChatCompletion>[1]
-  ): ReturnType<CreateChatCompletion> {
-    const requestedAt = Date.now();
-    let resp: Awaited<ReturnType<CreateChatCompletion>> | null = null;
-    let respPayload: openai.CreateChatCompletionResponse | null = null;
-    let statusCode: number | undefined = undefined;
-    let errorMessage: string | undefined;
-
-    try {
-      resp = await super.createChatCompletion(createChatCompletionRequest, options);
-      respPayload = resp.data;
-      statusCode = resp.status;
-    } catch (err) {
-      console.error("Error in createChatCompletion");
-      if ("isAxiosError" in err && err.isAxiosError) {
-        errorMessage = err.response?.data?.error?.message;
-        respPayload = err.response?.data;
-        statusCode = err.response?.status;
-      } else if ("message" in err) {
-        errorMessage = err.message.toString();
-      }
-      throw err;
-    } finally {
-      this.openPipeApi
-        ?.externalApiReport({
-          requestedAt,
-          receivedAt: Date.now(),
-          reqPayload: createChatCompletionRequest,
-          respPayload: respPayload,
-          statusCode: statusCode,
-          errorMessage,
-          tags: {
-            client: "openai-js",
-            clientVersion: version,
-          },
-        })
-        .catch((err) => {
-          console.error("Error reporting to OP", err);
-        });
-    }
-    console.log("done");
-    return resp;
-  }
-}
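Though this legacy wrapper is deleted, its finally-block telemetry pattern (time the call, capture status or error, report without ever failing the caller) is the heart of the library. A distilled, generic sketch of that pattern:

// Generic version of the deleted wrapper's reporting flow: the original call's
// result or error always propagates; the report is fire-and-forget and its
// failures are only logged, never thrown.
async function withReporting<T>(
  call: () => Promise<T>,
  report: (info: { requestedAt: number; receivedAt: number; errorMessage?: string }) => Promise<unknown>,
): Promise<T> {
  const requestedAt = Date.now();
  let errorMessage: string | undefined;
  try {
    return await call();
  } catch (err) {
    errorMessage = err instanceof Error ? err.message : String(err);
    throw err; // rethrow so the caller still sees the failure
  } finally {
    report({ requestedAt, receivedAt: Date.now(), errorMessage }).catch((err) =>
      console.error("Error reporting", err),
    );
  }
}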

client-libs/typescript/tsconfig.json

@@ -14,9 +14,12 @@
"isolatedModules": true,
"incremental": true,
"noUncheckedIndexedAccess": true,
"baseUrl": ".",
"outDir": "dist"
"noEmit": true,
"sourceMap": true,
"declaration": true,
"declarationMap": true,
"rootDir": "."
},
"include": ["src/**/*.ts"],
"include": ["**/*.ts"],
"exclude": ["node_modules"]
}