diff --git a/app/src/modelProviders/openai-ChatCompletion/getCompletion.ts b/app/src/modelProviders/openai-ChatCompletion/getCompletion.ts
index fcfed1f..b634c72 100644
--- a/app/src/modelProviders/openai-ChatCompletion/getCompletion.ts
+++ b/app/src/modelProviders/openai-ChatCompletion/getCompletion.ts
@@ -16,7 +16,16 @@ export async function getCompletion(
   try {
     if (onStream) {
       const resp = await openai.chat.completions.create(
-        { ...input, stream: true },
+        {
+          ...input,
+          stream: true,
+          openpipe: {
+            tags: {
+              prompt_id: "getCompletion",
+              stream: "true",
+            },
+          },
+        },
         {
           maxRetries: 0,
         },
@@ -34,7 +43,16 @@
       }
     } else {
       const resp = await openai.chat.completions.create(
-        { ...input, stream: false },
+        {
+          ...input,
+          stream: false,
+          openpipe: {
+            tags: {
+              prompt_id: "getCompletion",
+              stream: "false",
+            },
+          },
+        },
         {
           maxRetries: 0,
         },
diff --git a/app/src/server/api/autogenerate/autogenerateDatasetEntries.ts b/app/src/server/api/autogenerate/autogenerateDatasetEntries.ts
index e65afdf..1fb18a3 100644
--- a/app/src/server/api/autogenerate/autogenerateDatasetEntries.ts
+++ b/app/src/server/api/autogenerate/autogenerateDatasetEntries.ts
@@ -89,6 +89,11 @@ export const autogenerateDatasetEntries = async (
     function_call: { name: "add_list_of_data" },
     temperature: 0.5,
+    openpipe: {
+      tags: {
+        prompt_id: "autogenerateDatasetEntries",
+      },
+    },
   });
 
   const completionCallbacks = batchSizes.map((batchSize) =>
diff --git a/app/src/server/api/autogenerate/autogenerateScenarioValues.ts b/app/src/server/api/autogenerate/autogenerateScenarioValues.ts
index ef19423..fd488e6 100644
--- a/app/src/server/api/autogenerate/autogenerateScenarioValues.ts
+++ b/app/src/server/api/autogenerate/autogenerateScenarioValues.ts
@@ -98,6 +98,11 @@ export const autogenerateScenarioValues = async (
     function_call: { name: "add_scenario" },
     temperature: 0.5,
+    openpipe: {
+      tags: {
+        prompt_id: "autogenerateScenarioValues",
+      },
+    },
   });
 
   const parsed = JSON.parse(
diff --git a/app/src/server/utils/deriveNewContructFn.ts b/app/src/server/utils/deriveNewContructFn.ts
index 2d2717d..a356e30 100644
--- a/app/src/server/utils/deriveNewContructFn.ts
+++ b/app/src/server/utils/deriveNewContructFn.ts
@@ -109,6 +109,12 @@ const requestUpdatedPromptFunction = async (
     function_call: {
       name: "update_prompt_constructor_function",
     },
+    openpipe: {
+      tags: {
+        prompt_id: "deriveNewConstructFn",
+        model_translation: (!!newModel).toString(),
+      },
+    },
   });
 
   const argString = completion.choices[0]?.message?.function_call?.arguments || "{}";
diff --git a/app/src/server/utils/runOneEval.ts b/app/src/server/utils/runOneEval.ts
index 87f4664..e89fa4f 100644
--- a/app/src/server/utils/runOneEval.ts
+++ b/app/src/server/utils/runOneEval.ts
@@ -53,6 +53,11 @@ export const runGpt4Eval = async (
         },
       },
     ],
+    openpipe: {
+      tags: {
+        prompt_id: "runOneEval",
+      },
+    },
   });
 
   try {