format with prettier 3

.prettierignore (new file)
@@ -0,0 +1 @@
+src/codegen/openai.schema.json

.prettierrc (new file)
@@ -0,0 +1 @@
+{ "printWidth": 100 }
README.md
@@ -1,4 +1,3 @@
-
 # <img src="https://github.com/openpipe/openpipe/assets/41524992/3fec1f7f-f55d-43e9-bfb9-fa709a618b49" width="36" height="36"> OpenPipe

 OpenPipe is a flexible playground for comparing and optimizing LLM prompts. It lets you quickly generate, test and compare candidate prompts with realistic sample data.
@@ -16,7 +15,7 @@ Set up multiple prompt configurations and compare their output side-by-side. Eac
 Inspect prompt completions side-by-side.

 **Test Many Inputs**
-OpenPipe lets you *template* a prompt. Use the templating feature to run the prompts you're testing against many potential inputs for broader coverage of your problem space than you'd get with manual testing.
+OpenPipe lets you _template_ a prompt. Use the templating feature to run the prompts you're testing against many potential inputs for broader coverage of your problem space than you'd get with manual testing.

 **🪄 Auto-generate Test Scenarios**
 OpenPipe includes a tool to generate new test scenarios based on your existing prompts and scenarios. Just click "Autogenerate Scenario" to try it out!
@@ -32,6 +31,7 @@ Natively supports [OpenAI function calls](https://openai.com/blog/function-calli
 <img height="300px" alt="function calls" src="https://github.com/openpipe/openpipe/assets/176426/48ad13fe-af2f-4294-bf32-62015597fd9b">
+

 ## Supported Models

 OpenPipe currently supports GPT-3.5 and GPT-4. Wider model support is planned.

 ## Running Locally
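The templating feature described in the README excerpt above pairs a prompt containing variable placeholders with per-scenario variable values (the repo's own `fillTemplate`/`fillTemplateJson` utilities, touched later in this diff, perform that substitution). A minimal illustration of the idea — this is a generic sketch with an invented `{{variable}}` syntax and a hypothetical `renderTemplate` helper, not OpenPipe's actual API:

```ts
// Hypothetical helper: replace {{variable}} placeholders with scenario values.
type VariableMap = Record<string, string>;

function renderTemplate(template: string, variables: VariableMap): string {
  return template.replace(/{{(\w+)}}/g, (_match, name: string) => variables[name] ?? "");
}

const promptTemplate =
  "Write a one-sentence product description for {{product}} aimed at {{audience}}.";

// Each test scenario supplies its own variable values, so one prompt
// gets exercised against many realistic inputs.
const scenarios: VariableMap[] = [
  { product: "a reusable water bottle", audience: "hikers" },
  { product: "a budgeting app", audience: "college students" },
];

for (const scenario of scenarios) {
  console.log(renderTemplate(promptTemplate, scenario));
}
```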
package.json
@@ -74,6 +74,7 @@
     "eslint": "^8.40.0",
     "eslint-config-next": "^13.4.2",
     "eslint-plugin-unused-imports": "^2.0.0",
+    "prettier": "^3.0.0",
     "prisma": "^4.14.0",
     "typescript": "^5.0.4",
     "yaml": "^2.3.1"
@@ -83,8 +84,5 @@
   },
   "prisma": {
     "seed": "tsx prisma/seed.ts"
-  },
-  "prettier": {
-    "printWidth": 100
   }
 }
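The inline `prettier` key moves out of package.json into the new `.prettierrc`, and the formatter is pinned at ^3.0.0. One point worth keeping in mind while reading the rest of the diff: Prettier 3 changed its default `trailingComma` option from "es5" to "all", which is what introduces the trailing commas after final function arguments throughout the TypeScript files below. A sketch of the effective configuration — only `printWidth` is actually set by this repo; the remaining values are Prettier 3 defaults spelled out here for reference, not copied from the repo:

```json
{
  "printWidth": 100,
  "tabWidth": 2,
  "semi": true,
  "singleQuote": false,
  "trailingComma": "all"
}
```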
@@ -39,7 +39,7 @@ const convertOpenApiToJsonSchema = async (url: string) => {
   // Write the JSON schema to a file in the current directory
   fs.writeFileSync(
     path.join(currentDirectory, "openai.schema.json"),
-    JSON.stringify(jsonSchema, null, 2)
+    JSON.stringify(jsonSchema, null, 2),
   );
 };

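The only change above is the trailing comma Prettier 3 now adds to the last argument of a multi-line call. For readers unfamiliar with the codegen step, a self-contained sketch of what this write does — `currentDirectory` and `jsonSchema` are stand-ins for values the real script derives from the fetched OpenAPI spec:

```ts
import * as fs from "fs";
import * as path from "path";

// Stand-ins: the real script computes these from the converted OpenAPI document.
const currentDirectory = __dirname;
const jsonSchema = { openapi: "3.0.0", paths: {} };

// Pretty-print the converted schema next to the codegen script,
// using the same 2-space indentation seen in openai.schema.json.
fs.writeFileSync(
  path.join(currentDirectory, "openai.schema.json"),
  JSON.stringify(jsonSchema, null, 2),
);
```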
src/codegen/openai.schema.json
Prettier collapsed the short arrays in this generated schema — the "tags", "enum", "required", and "example" lists — onto single lines. The same mechanical change repeats for every path item (from "/chat/completions" through "/moderations") and every component schema (from Error through FineTuneEvent). Representative hunks:

@@ -20,9 +20,7 @@
     "/chat/completions": {
       "post": {
         "operationId": "createChatCompletion",
-        "tags": [
-          "OpenAI"
-        ],
+        "tags": ["OpenAI"],
         "summary": "Creates a model response for the given chat conversation.",
         "requestBody": {
           "required": true,

@@ -1004,12 +958,7 @@
             "nullable": true
           }
         },
-        "required": [
-          "type",
-          "message",
-          "param",
-          "code"
-        ]
+        "required": ["type", "message", "param", "code"]
       },
       "ErrorResponse": {
         "type": "object",

@@ -1312,10 +1244,7 @@
           },
           "finish_reason": {
             "type": "string",
-            "enum": [
-              "stop",
-              "length"
-            ]
+            "enum": ["stop", "length"]
           }
         }
       }
@@ -189,7 +189,7 @@ export default function EditEvaluations() {
             <Icon as={BsX} boxSize={6} />
           </Button>
         </HStack>
-      )
+      ),
     )}
     {editingId == null && (
       <Button
@@ -1,12 +1,6 @@
 import { type RouterOutputs, api } from "~/utils/api";
 import { type PromptVariant, type Scenario } from "../types";
-import {
-  Spinner,
-  Text,
-  Box,
-  Center,
-  Flex,
-} from "@chakra-ui/react";
+import { Spinner, Text, Box, Center, Flex } from "@chakra-ui/react";
 import { useExperiment, useHandledAsyncCallback } from "~/utils/hooks";
 import SyntaxHighlighter from "react-syntax-highlighter";
 import { docco } from "react-syntax-highlighter/dist/cjs/styles/hljs";
@@ -82,7 +76,7 @@ export default function OutputCell({
       await utils.promptVariants.stats.invalidate();
       fetchMutex.current = false;
     },
-    [outputMutation, scenario.id, variant.id]
+    [outputMutation, scenario.id, variant.id],
   );
   const hardRefetch = useCallback(() => fetchOutput(true), [fetchOutput]);

@@ -143,7 +137,7 @@ export default function OutputCell({
             function: message.function_call.name,
             args: parsedArgs,
           },
-          { maxLength: 40 }
+          { maxLength: 40 },
         )}
       </SyntaxHighlighter>
       <OutputStats model={model} modelOutput={output} scenario={scenario} />
@@ -160,5 +154,3 @@ export default function OutputCell({
     </Flex>
   );
 }
-
-
@@ -64,7 +64,7 @@ export default function ScenarioEditor({
       });
       await utils.scenarios.list.invalidate();
     },
-    [reorderMutation, scenario.id]
+    [reorderMutation, scenario.id],
   );

   return (
@@ -17,7 +17,7 @@ export default function VariantConfigEditor(props: { variant: PromptVariant }) {

   const savedConfig = useMemo(
     () => JSON.stringify(props.variant.config, null, 2),
-    [props.variant.config]
+    [props.variant.config],
   );
   const savedConfigRef = useRef(savedConfig);

@@ -46,7 +46,7 @@ export default function VariantHeader(props: { variant: PromptVariant }) {
       });
       await utils.promptVariants.list.invalidate();
     },
-    [reorderMutation, props.variant.id]
+    [reorderMutation, props.variant.id],
   );

   return (
@@ -20,7 +20,7 @@ export default function VariantStats(props: { variant: PromptVariant }) {
         scenarioCount: 0,
         outputCount: 0,
       },
-    }
+    },
   );

   const [passColor, neutralColor, failColor] = useToken("colors", [
@@ -20,13 +20,13 @@ const stickyHeaderStyle: SystemStyleObject = {
 export default function OutputsTable({ experimentId }: { experimentId: string | undefined }) {
   const variants = api.promptVariants.list.useQuery(
     { experimentId: experimentId as string },
-    { enabled: !!experimentId }
+    { enabled: !!experimentId },
   );
   const openDrawer = useStore((s) => s.openDrawer);

   const scenarios = api.scenarios.list.useQuery(
     { experimentId: experimentId as string },
-    { enabled: !!experimentId }
+    { enabled: !!experimentId },
   );

   if (!variants.data || !scenarios.data) return null;
@@ -30,9 +30,7 @@ export default function ExperimentsPage() {
         <NewExperimentButton mr={4} borderRadius={8} />
       </HStack>
       <SimpleGrid w="full" columns={{ base: 1, md: 2, lg: 3, xl: 4 }} spacing={8} p="4">
-        {experiments?.data?.map((exp) => (
-          <ExperimentCard key={exp.id} exp={exp} />
-        ))}
+        {experiments?.data?.map((exp) => <ExperimentCard key={exp.id} exp={exp} />)}
       </SimpleGrid>
     </VStack>
   </AppShell>
@@ -1,14 +1,14 @@
-import { type GetServerSideProps } from 'next';
+import { type GetServerSideProps } from "next";

 // eslint-disable-next-line @typescript-eslint/require-await
 export const getServerSideProps: GetServerSideProps = async (context) => {
   return {
     redirect: {
-      destination: '/experiments',
+      destination: "/experiments",
       permanent: false,
     },
-  }
-}
+  };
+};

 export default function Home() {
   return null;
@@ -1,4 +1,3 @@
-
 import { type CompletionCreateParams } from "openai/resources/chat";
 import { prisma } from "../db";
 import { openai } from "../utils/openai";
@@ -27,7 +26,7 @@ function isAxiosError(error: unknown): error is AxiosError {
   return false;
 }
 export const autogenerateScenarioValues = async (
-  experimentId: string
+  experimentId: string,
 ): Promise<Record<string, string>> => {
   const [experiment, variables, existingScenarios, prompt] = await Promise.all([
     prisma.experiment.findUnique({
@@ -84,8 +83,8 @@ export const autogenerateScenarioValues = async (
       (scenario) =>
         pick(
           scenario.variableValues,
-          variables.map((variable) => variable.label)
-        ) as Record<string, string>
+          variables.map((variable) => variable.label),
+        ) as Record<string, string>,
     )
     .filter((vals) => Object.keys(vals ?? {}).length > 0)
     .forEach((vals) => {
@@ -99,10 +98,13 @@ export const autogenerateScenarioValues = async (
       });
     });

-  const variableProperties = variables.reduce((acc, variable) => {
-    acc[variable.label] = { type: "string" };
-    return acc;
-  }, {} as Record<string, { type: "string" }>);
+  const variableProperties = variables.reduce(
+    (acc, variable) => {
+      acc[variable.label] = { type: "string" };
+      return acc;
+    },
+    {} as Record<string, { type: "string" }>,
+  );

   try {
     const completion = await openai.chat.completions.create({
@@ -123,7 +125,7 @@ export const autogenerateScenarioValues = async (
     });

     const parsed = JSON.parse(
-      completion.choices[0]?.message?.function_call?.arguments ?? "{}"
+      completion.choices[0]?.message?.function_call?.arguments ?? "{}",
     ) as Record<string, string>;
     return parsed;
   } catch (e) {
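For context on what this file does (independent of the formatting changes): it asks the chat model to invent new scenario variable values by exposing a single function whose JSON-schema parameters are built from the experiment's variables, then parses the returned `function_call.arguments`. A minimal, self-contained sketch of that pattern using the openai v4 SDK — the model name, function name, prompt text, and variable schema here are illustrative stand-ins, not the repo's actual values:

```ts
import OpenAI from "openai";

const openai = new OpenAI(); // reads OPENAI_API_KEY from the environment

// Stand-in for the schema the real code derives from the experiment's variables.
const variableProperties: Record<string, { type: string }> = {
  city: { type: "string" },
  question: { type: "string" },
};

export const generateScenarioValues = async (): Promise<Record<string, string>> => {
  const completion = await openai.chat.completions.create({
    model: "gpt-3.5-turbo-0613", // stand-in; any function-calling-capable model works
    messages: [{ role: "user", content: "Generate one new, realistic test scenario." }],
    functions: [
      {
        name: "add_scenario",
        description: "Add a new test scenario with a value for every variable.",
        parameters: { type: "object", properties: variableProperties },
      },
    ],
    function_call: { name: "add_scenario" }, // force the structured response
  });

  // The model returns its values as a JSON string in function_call.arguments.
  return JSON.parse(
    completion.choices[0]?.message?.function_call?.arguments ?? "{}",
  ) as Record<string, string>;
};
```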
@@ -21,7 +21,7 @@ export const evaluationsRouter = createTRPCRouter({
         name: z.string(),
         matchString: z.string(),
         matchType: z.nativeEnum(EvaluationMatchType),
-      })
+      }),
     )
     .mutation(async ({ input }) => {
       const evaluation = await prisma.evaluation.create({
@@ -44,7 +44,7 @@ export const evaluationsRouter = createTRPCRouter({
           matchString: z.string().optional(),
           matchType: z.nativeEnum(EvaluationMatchType).optional(),
         }),
-      })
+      }),
     )
     .mutation(async ({ input }) => {
       await prisma.evaluation.update({
@@ -56,7 +56,7 @@ export const evaluationsRouter = createTRPCRouter({
         },
       });
       await reevaluateEvaluation(
-        await prisma.evaluation.findUniqueOrThrow({ where: { id: input.id } })
+        await prisma.evaluation.findUniqueOrThrow({ where: { id: input.id } }),
       );
     }),

@@ -32,7 +32,7 @@ export const experimentsRouter = createTRPCRouter({
         testScenarioCount: visibleTestScenarioCount,
         promptVariantCount: visiblePromptVariantCount,
       };
-    })
+    }),
   );

   return experimentsWithCounts;
@@ -16,7 +16,7 @@ export const modelOutputsRouter = createTRPCRouter({
         variantId: z.string(),
         channel: z.string().optional(),
         forceRefetch: z.boolean().optional(),
-      })
+      }),
     )
     .mutation(async ({ input }) => {
       const existing = await prisma.modelOutput.findUnique({
@@ -46,7 +46,7 @@ export const modelOutputsRouter = createTRPCRouter({

       const filledTemplate = fillTemplateJson(
         variant.config as JSONSerializable,
-        scenario.variableValues as VariableMap
+        scenario.variableValues as VariableMap,
       );

       const inputHash = crypto
@@ -73,7 +73,7 @@ export const promptVariantsRouter = createTRPCRouter({
     .input(
       z.object({
         experimentId: z.string(),
-      })
+      }),
     )
     .mutation(async ({ input }) => {
       const lastVariant = await prisma.promptVariant.findFirst({
@@ -126,7 +126,7 @@ export const promptVariantsRouter = createTRPCRouter({
         updates: z.object({
           label: z.string().optional(),
         }),
-      })
+      }),
     )
     .mutation(async ({ input }) => {
       const existing = await prisma.promptVariant.findUnique({
@@ -151,7 +151,7 @@ export const promptVariantsRouter = createTRPCRouter({
     .input(
       z.object({
         id: z.string(),
-      })
+      }),
     )
     .mutation(async ({ input }) => {
       return await prisma.promptVariant.update({
@@ -165,7 +165,7 @@ export const promptVariantsRouter = createTRPCRouter({
       z.object({
         id: z.string(),
         config: z.string(),
-      })
+      }),
     )
     .mutation(async ({ input }) => {
       const existing = await prisma.promptVariant.findUnique({
@@ -217,7 +217,7 @@ export const promptVariantsRouter = createTRPCRouter({
       z.object({
         draggedId: z.string(),
         droppedId: z.string(),
-      })
+      }),
     )
     .mutation(async ({ input }) => {
       const dragged = await prisma.promptVariant.findUnique({
@@ -234,7 +234,7 @@ export const promptVariantsRouter = createTRPCRouter({

       if (!dragged || !dropped || dragged.experimentId !== dropped.experimentId) {
         throw new Error(
-          `Prompt Variant with id ${input.draggedId} or ${input.droppedId} does not exist`
+          `Prompt Variant with id ${input.draggedId} or ${input.droppedId} does not exist`,
         );
       }

@@ -277,7 +277,7 @@ export const promptVariantsRouter = createTRPCRouter({
             sortIndex: index,
           },
         });
-      })
+      }),
     );
   }),
 });
@@ -21,7 +21,7 @@ export const scenariosRouter = createTRPCRouter({
       z.object({
         experimentId: z.string(),
         autogenerate: z.boolean().optional(),
-      })
+      }),
     )
     .mutation(async ({ input }) => {
       const maxSortIndex =
@@ -68,7 +68,7 @@ export const scenariosRouter = createTRPCRouter({
       z.object({
         draggedId: z.string(),
         droppedId: z.string(),
-      })
+      }),
     )
     .mutation(async ({ input }) => {
       const dragged = await prisma.testScenario.findUnique({
@@ -85,7 +85,7 @@ export const scenariosRouter = createTRPCRouter({

       if (!dragged || !dropped || dragged.experimentId !== dropped.experimentId) {
         throw new Error(
-          `Prompt Variant with id ${input.draggedId} or ${input.droppedId} does not exist`
+          `Prompt Variant with id ${input.draggedId} or ${input.droppedId} does not exist`,
         );
       }

@@ -128,7 +128,7 @@ export const scenariosRouter = createTRPCRouter({
             sortIndex: index,
           },
         });
-      })
+      }),
     );
   }),

@@ -137,7 +137,7 @@ export const scenariosRouter = createTRPCRouter({
       z.object({
         id: z.string(),
         values: z.record(z.string()),
-      })
+      }),
     )
     .mutation(async ({ input }) => {
       const existing = await prisma.testScenario.findUnique({
@@ -76,8 +76,7 @@ const t = initTRPC.context<typeof createTRPCContext>().create({
       ...shape,
       data: {
         ...shape.data,
-        zodError:
-          error.cause instanceof ZodError ? error.cause.flatten() : null,
+        zodError: error.cause instanceof ZodError ? error.cause.flatten() : null,
       },
     };
   },
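The hunk above reflows the tRPC error formatter that attaches flattened Zod validation errors to every error response. For context, a minimal self-contained sketch of that pattern, assuming a tRPC v10-style `initTRPC` setup; the `Context` type here is a stand-in, not the project's real `createTRPCContext`:

```ts
import { initTRPC } from "@trpc/server";
import { ZodError } from "zod";

type Context = Record<string, unknown>; // stand-in context type for this sketch

const t = initTRPC.context<Context>().create({
  errorFormatter({ shape, error }) {
    return {
      ...shape,
      data: {
        ...shape.data,
        // Expose field-level validation issues to the client when the
        // underlying cause was a Zod parsing error; otherwise null.
        zodError: error.cause instanceof ZodError ? error.cause.flatten() : null,
      },
    };
  },
});

export const router = t.router;
export const publicProcedure = t.procedure;
```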
@@ -8,8 +8,7 @@ const globalForPrisma = globalThis as unknown as {
 export const prisma =
   globalForPrisma.prisma ??
   new PrismaClient({
-    log:
-      env.NODE_ENV === "development" ? ["query", "error", "warn"] : ["error"],
+    log: env.NODE_ENV === "development" ? ["query", "error", "warn"] : ["error"],
   });

 if (env.NODE_ENV !== "production") globalForPrisma.prisma = prisma;
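This hunk touches the usual global-singleton Prisma client: in development, Next.js hot reloading re-evaluates modules, so the client is cached on `globalThis` to avoid opening a new connection pool on every reload. A minimal sketch of the full pattern, using `process.env` directly instead of the project's typed `env` helper:

```ts
import { PrismaClient } from "@prisma/client";

// Cache the client on globalThis so hot reloads in development reuse it.
const globalForPrisma = globalThis as unknown as { prisma?: PrismaClient };

export const prisma =
  globalForPrisma.prisma ??
  new PrismaClient({
    // Verbose query logging only in development.
    log: process.env.NODE_ENV === "development" ? ["query", "error", "warn"] : ["error"],
  });

if (process.env.NODE_ENV !== "production") globalForPrisma.prisma = prisma;
```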
@@ -5,7 +5,7 @@ import { type VariableMap, fillTemplate } from "./fillTemplate";
 export const evaluateOutput = (
   modelOutput: ModelOutput,
   scenario: TestScenario,
-  evaluation: Evaluation
+  evaluation: Evaluation,
 ): boolean => {
   const output = modelOutput.output as unknown as ChatCompletion;
   const message = output?.choices?.[0]?.message;

@@ -20,7 +20,7 @@ export const reevaluateVariant = async (variantId: string) => {
   await Promise.all(
     evaluations.map(async (evaluation) => {
       const passCount = modelOutputs.filter((output) =>
-        evaluateOutput(output, output.testScenario, evaluation)
+        evaluateOutput(output, output.testScenario, evaluation),
       ).length;
       const failCount = modelOutputs.length - passCount;

@@ -42,7 +42,7 @@ export const reevaluateVariant = async (variantId: string) => {
           failCount,
         },
       });
-    })
+    }),
   );
 };

@@ -64,7 +64,7 @@ export const reevaluateEvaluation = async (evaluation: Evaluation) => {
     variants.map(async (variant) => {
       const outputs = modelOutputs.filter((output) => output.promptVariantId === variant.id);
       const passCount = outputs.filter((output) =>
-        evaluateOutput(output, output.testScenario, evaluation)
+        evaluateOutput(output, output.testScenario, evaluation),
       ).length;
       const failCount = outputs.length - passCount;

@@ -86,6 +86,6 @@ export const reevaluateEvaluation = async (evaluation: Evaluation) => {
           failCount,
         },
       });
-    })
+    }),
   );
 };
@@ -8,17 +8,20 @@ export function fillTemplate(template: string, variables: VariableMap): string {

 export function fillTemplateJson<T extends JSONSerializable>(
   template: T,
-  variables: VariableMap
+  variables: VariableMap,
 ): T {
   if (typeof template === "string") {
     return fillTemplate(template, variables) as T;
   } else if (Array.isArray(template)) {
     return template.map((item) => fillTemplateJson(item, variables)) as T;
   } else if (typeof template === "object" && template !== null) {
-    return Object.keys(template).reduce((acc, key) => {
-      acc[key] = fillTemplateJson(template[key] as JSONSerializable, variables);
-      return acc;
-    }, {} as { [key: string]: JSONSerializable } & T);
+    return Object.keys(template).reduce(
+      (acc, key) => {
+        acc[key] = fillTemplateJson(template[key] as JSONSerializable, variables);
+        return acc;
+      },
+      {} as { [key: string]: JSONSerializable } & T,
+    );
   } else {
     return template;
   }
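`fillTemplateJson` walks an arbitrary JSON-serializable value and applies `fillTemplate` to every string it finds, which is how a prompt variant's config gets scenario variables substituted in. A rough usage sketch follows; the `{{name}}` placeholder syntax is an assumption for illustration, since the real `fillTemplate` implementation is not shown in this diff:

```ts
// Assumed placeholder syntax; the project's actual fillTemplate may differ.
type VariableMap = Record<string, string>;
type JSONSerializable =
  | string
  | number
  | boolean
  | null
  | JSONSerializable[]
  | { [key: string]: JSONSerializable };

const fillTemplate = (template: string, variables: VariableMap): string =>
  template.replace(/\{\{(\w+)\}\}/g, (_match, name: string) => variables[name] ?? "");

// Same recursive walk as the diffed fillTemplateJson, written without the generic cast.
const fillTemplateJson = (template: JSONSerializable, variables: VariableMap): JSONSerializable => {
  if (typeof template === "string") return fillTemplate(template, variables);
  if (Array.isArray(template)) return template.map((item) => fillTemplateJson(item, variables));
  if (typeof template === "object" && template !== null) {
    const result: { [key: string]: JSONSerializable } = {};
    for (const [key, value] of Object.entries(template)) {
      result[key] = fillTemplateJson(value, variables);
    }
    return result;
  }
  return template;
};

// A prompt config with one scenario variable filled in:
const payload = fillTemplateJson(
  { model: "gpt-3.5-turbo", messages: [{ role: "user", content: "Summarize: {{text}}" }] },
  { text: "OpenPipe is a prompt playground." },
);
console.log(JSON.stringify(payload, null, 2));
```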
@@ -23,7 +23,7 @@ type CompletionResponse = {

 export async function getCompletion(
   payload: JSONSerializable,
-  channel?: string
+  channel?: string,
 ): Promise<CompletionResponse> {
   const modelName = getModelName(payload);
   if (!modelName)
@@ -37,7 +37,7 @@ export async function getCompletion(
     return getOpenAIChatCompletion(
       payload as unknown as CompletionCreateParams,
       env.OPENAI_API_KEY,
-      channel
+      channel,
     );
   }
   return {
@@ -51,7 +51,7 @@ export async function getCompletion(
 export async function getOpenAIChatCompletion(
   payload: CompletionCreateParams,
   apiKey: string,
-  channel?: string
+  channel?: string,
 ): Promise<CompletionResponse> {
   // If functions are enabled, disable streaming so that we get the full response with token counts
   if (payload.functions?.length) payload.stream = false;
@@ -2,7 +2,11 @@ import { omit } from "lodash";
 import { env } from "~/env.mjs";

 import OpenAI from "openai";
-import { type ChatCompletion, type ChatCompletionChunk, type CompletionCreateParams } from "openai/resources/chat";
+import {
+  type ChatCompletion,
+  type ChatCompletionChunk,
+  type CompletionCreateParams,
+} from "openai/resources/chat";

 // console.log("creating openai client");

@@ -10,7 +14,7 @@ export const openai = new OpenAI({ apiKey: env.OPENAI_API_KEY });

 export const mergeStreamedChunks = (
   base: ChatCompletion | null,
-  chunk: ChatCompletionChunk
+  chunk: ChatCompletionChunk,
 ): ChatCompletion => {
   if (base === null) {
     return mergeStreamedChunks({ ...chunk, choices: [] }, chunk);
@@ -25,11 +29,14 @@ export const mergeStreamedChunks = (

     if (choice.delta?.content)
       baseChoice.message.content =
-        (baseChoice.message.content as string ?? "") + (choice.delta.content ?? "");
+        ((baseChoice.message.content as string) ?? "") + (choice.delta.content ?? "");
     if (choice.delta?.function_call) {
       const fnCall = baseChoice.message.function_call ?? {};
-      fnCall.name = (fnCall.name as string ?? "") + (choice.delta.function_call.name as string ?? "");
-      fnCall.arguments = (fnCall.arguments as string ?? "") + (choice.delta.function_call.arguments as string ?? "");
+      fnCall.name =
+        ((fnCall.name as string) ?? "") + ((choice.delta.function_call.name as string) ?? "");
+      fnCall.arguments =
+        ((fnCall.arguments as string) ?? "") +
+        ((choice.delta.function_call.arguments as string) ?? "");
     }
   } else {
     choices.push({ ...omit(choice, "delta"), message: { role: "assistant", ...choice.delta } });
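`mergeStreamedChunks` folds each streamed `ChatCompletionChunk` into an accumulating `ChatCompletion`, concatenating message content and any `function_call` name/arguments as deltas arrive. A hedged usage sketch, assuming the openai v4 SDK's async-iterable streaming API and an import path for the exported client and helper:

```ts
import { openai, mergeStreamedChunks } from "./openai"; // assumed export path
import { type ChatCompletion } from "openai/resources/chat";

// Accumulate a streamed chat completion into one ChatCompletion object.
async function streamAndMerge(): Promise<ChatCompletion> {
  const stream = await openai.chat.completions.create({
    model: "gpt-3.5-turbo",
    messages: [{ role: "user", content: "Say hello" }],
    stream: true,
  });

  let merged: ChatCompletion | null = null;
  for await (const chunk of stream) {
    merged = mergeStreamedChunks(merged, chunk);
    // Partial `merged` could be broadcast to the UI here as tokens arrive.
  }
  if (!merged) throw new Error("stream produced no chunks");
  return merged;
}
```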
@@ -25,7 +25,7 @@ const openAICompletionTokensToDollars: { [key in OpenAIChatModel]: number } = {
 export const calculateTokenCost = (
   model: SupportedModel | null,
   numTokens: number,
-  isCompletion = false
+  isCompletion = false,
 ) => {
   if (!model) return 0;
   if (model in OpenAIChatModel) {
@@ -37,7 +37,7 @@ export const calculateTokenCost = (
 const calculateOpenAIChatTokenCost = (
   model: OpenAIChatModel,
   numTokens: number,
-  isCompletion: boolean
+  isCompletion: boolean,
 ) => {
   const tokensToDollars = isCompletion
     ? openAICompletionTokensToDollars[model]
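These helpers pick a per-token dollar rate for the model (separate tables for prompt and completion tokens) and presumably multiply it by the token count; the tail of the function is not shown here. A rough sketch of that shape with placeholder rates, which are illustrative and not the project's actual pricing table:

```ts
// Placeholder per-token rates in dollars — illustrative only.
const openAIPromptTokensToDollars = { "gpt-3.5-turbo": 0.0015 / 1000, "gpt-4": 0.03 / 1000 } as const;
const openAICompletionTokensToDollars = { "gpt-3.5-turbo": 0.002 / 1000, "gpt-4": 0.06 / 1000 } as const;

type Model = keyof typeof openAIPromptTokensToDollars;

const calculateOpenAIChatTokenCost = (model: Model, numTokens: number, isCompletion: boolean) => {
  const tokensToDollars = isCompletion
    ? openAICompletionTokensToDollars[model]
    : openAIPromptTokensToDollars[model];
  return tokensToDollars * numTokens;
};

// e.g. a call that used 1,200 prompt tokens and 300 completion tokens:
const totalCost =
  calculateOpenAIChatTokenCost("gpt-3.5-turbo", 1200, false) +
  calculateOpenAIChatTokenCost("gpt-3.5-turbo", 300, true);
console.log(totalCost.toFixed(6)); // ≈ 0.002400
```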
@@ -10,7 +10,7 @@ interface GPTTokensMessageItem {

 export const countOpenAIChatTokens = (
   model: OpenAIChatModel,
-  messages: ChatCompletion.Choice.Message[]
+  messages: ChatCompletion.Choice.Message[],
 ) => {
   return new GPTTokens({ model, messages: messages as unknown as GPTTokensMessageItem[] })
     .usedTokens;
@@ -16,5 +16,5 @@ export const formatTimePast = (date: Date) => {
   const minuteDiff = Math.floor(now.diff(date, "minute"));
   if (minuteDiff > 0) return dayjs.duration(-minuteDiff, "minutes").humanize(true);

-  return 'a few seconds ago'
+  return "a few seconds ago";
 };
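`formatTimePast` leans on dayjs's duration and relative-time humanizing. A self-contained sketch of the same function, with the plugin setup assumed here since it is not part of this diff:

```ts
import dayjs from "dayjs";
import duration from "dayjs/plugin/duration";
import relativeTime from "dayjs/plugin/relativeTime";

dayjs.extend(duration);
dayjs.extend(relativeTime);

export const formatTimePast = (date: Date) => {
  const now = dayjs();
  const minuteDiff = Math.floor(now.diff(date, "minute"));
  // A negative duration humanized with the suffix flag reads as "... ago".
  if (minuteDiff > 0) return dayjs.duration(-minuteDiff, "minutes").humanize(true);

  return "a few seconds ago";
};

formatTimePast(new Date(Date.now() - 5 * 60 * 1000)); // "5 minutes ago"
```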
@@ -6,7 +6,7 @@ export const useExperiment = () => {
   const router = useRouter();
   const experiment = api.experiments.get.useQuery(
     { id: router.query.id as string },
-    { enabled: !!router.query.id }
+    { enabled: !!router.query.id },
   );

   return experiment;
@@ -16,7 +16,7 @@ type AsyncFunction<T extends unknown[], U> = (...args: T) => Promise<U>;

 export function useHandledAsyncCallback<T extends unknown[], U>(
   callback: AsyncFunction<T, U>,
-  deps: React.DependencyList
+  deps: React.DependencyList,
 ) {
   const [loading, setLoading] = useState(0);
   const [error, setError] = useState<Error | null>(null);
@@ -21,13 +21,6 @@
       "~/*": ["./src/*"]
     }
   },
-  "include": [
-    ".eslintrc.cjs",
-    "next-env.d.ts",
-    "**/*.ts",
-    "**/*.tsx",
-    "**/*.cjs",
-    "**/*.mjs"
-  ],
+  "include": [".eslintrc.cjs", "next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.cjs", "**/*.mjs"],
   "exclude": ["node_modules"]
 }