workshop v0

dexhorthy
2025-05-14 14:48:47 -07:00
parent dcdcfb7f08
commit 7925103c87
40 changed files with 33 additions and 4420 deletions

workshops/.gitignore vendored Normal file

@@ -0,0 +1 @@
baml_client/


@@ -1,154 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
import type { BamlRuntime, FunctionResult, BamlCtxManager, ClientRegistry, Image, Audio, Collector } from "@boundaryml/baml"
import { toBamlError, BamlStream, type HTTPRequest } from "@boundaryml/baml"
import type { Checked, Check, RecursivePartialNull as MovedRecursivePartialNull } from "./types"
import type { partial_types } from "./partial_types"
import type * as types from "./types"
import type {DoneForNow} from "./types"
import type TypeBuilder from "./type_builder"
import { AsyncHttpRequest, AsyncHttpStreamRequest } from "./async_request"
import { LlmResponseParser, LlmStreamParser } from "./parser"
import { DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX, DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME } from "./globals"
/**
* @deprecated Use RecursivePartialNull from 'baml_client/types' instead.
*/
export type RecursivePartialNull<T> = MovedRecursivePartialNull<T>
type BamlCallOptions = {
tb?: TypeBuilder
clientRegistry?: ClientRegistry
collector?: Collector | Collector[]
}
export class BamlAsyncClient {
private runtime: BamlRuntime
private ctxManager: BamlCtxManager
private streamClient: BamlStreamClient
private httpRequest: AsyncHttpRequest
private httpStreamRequest: AsyncHttpStreamRequest
private llmResponseParser: LlmResponseParser
private llmStreamParser: LlmStreamParser
private bamlOptions: BamlCallOptions
constructor(runtime: BamlRuntime, ctxManager: BamlCtxManager, bamlOptions?: BamlCallOptions) {
this.runtime = runtime
this.ctxManager = ctxManager
this.streamClient = new BamlStreamClient(runtime, ctxManager, bamlOptions)
this.httpRequest = new AsyncHttpRequest(runtime, ctxManager)
this.httpStreamRequest = new AsyncHttpStreamRequest(runtime, ctxManager)
this.llmResponseParser = new LlmResponseParser(runtime, ctxManager)
this.llmStreamParser = new LlmStreamParser(runtime, ctxManager)
this.bamlOptions = bamlOptions || {}
}
withOptions(bamlOptions: BamlCallOptions) {
return new BamlAsyncClient(this.runtime, this.ctxManager, bamlOptions)
}
get stream() {
return this.streamClient
}
get request() {
return this.httpRequest
}
get streamRequest() {
return this.httpStreamRequest
}
get parse() {
return this.llmResponseParser
}
get parseStream() {
return this.llmStreamParser
}
async DetermineNextStep(
thread: string,
__baml_options__?: BamlCallOptions
): Promise<DoneForNow> {
try {
const options = { ...this.bamlOptions, ...(__baml_options__ || {}) }
const collector = options.collector ? (Array.isArray(options.collector) ? options.collector : [options.collector]) : [];
const raw = await this.runtime.callFunction(
"DetermineNextStep",
{
"thread": thread
},
this.ctxManager.cloneContext(),
options.tb?.__tb(),
options.clientRegistry,
collector,
)
return raw.parsed(false) as DoneForNow
} catch (error) {
throw toBamlError(error);
}
}
}
class BamlStreamClient {
private runtime: BamlRuntime
private ctxManager: BamlCtxManager
private bamlOptions: BamlCallOptions
constructor(runtime: BamlRuntime, ctxManager: BamlCtxManager, bamlOptions?: BamlCallOptions) {
this.runtime = runtime
this.ctxManager = ctxManager
this.bamlOptions = bamlOptions || {}
}
DetermineNextStep(
thread: string,
__baml_options__?: { tb?: TypeBuilder, clientRegistry?: ClientRegistry, collector?: Collector | Collector[] }
): BamlStream<partial_types.DoneForNow, DoneForNow> {
try {
const options = { ...this.bamlOptions, ...(__baml_options__ || {}) }
const collector = options.collector ? (Array.isArray(options.collector) ? options.collector : [options.collector]) : [];
const raw = this.runtime.streamFunction(
"DetermineNextStep",
{
"thread": thread
},
undefined,
this.ctxManager.cloneContext(),
options.tb?.__tb(),
options.clientRegistry,
collector,
)
return new BamlStream<partial_types.DoneForNow, DoneForNow>(
raw,
(a): partial_types.DoneForNow => a,
(a): DoneForNow => a,
this.ctxManager.cloneContext(),
)
} catch (error) {
throw toBamlError(error);
}
}
}
export const b = new BamlAsyncClient(DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME, DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX)
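For orientation: the generated `b` client above is what the workshop agent code calls. A minimal usage sketch (the import path and entrypoint name are illustrative; assumes the generated baml_client directory is importable and the underlying model client is configured):

import { b } from "./baml_client";

async function demo() {
  // DetermineNextStep takes the serialized thread as a string and returns a parsed DoneForNow
  const next = await b.DetermineNextStep(
    JSON.stringify([{ type: "user_input", data: "hello!" }])
  );
  console.log(next.intent, next.message);
}

demo().catch(console.error);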


@@ -1,80 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
import type { BamlRuntime, BamlCtxManager, ClientRegistry, Image, Audio } from "@boundaryml/baml"
import { toBamlError, HTTPRequest } from "@boundaryml/baml"
import type { Checked, Check } from "./types"
import type * as types from "./types"
import type {DoneForNow} from "./types"
import type TypeBuilder from "./type_builder"
type BamlCallOptions = {
tb?: TypeBuilder
clientRegistry?: ClientRegistry
}
export class AsyncHttpRequest {
constructor(private runtime: BamlRuntime, private ctxManager: BamlCtxManager) {}
async DetermineNextStep(
thread: string,
__baml_options__?: BamlCallOptions
): Promise<HTTPRequest> {
try {
return await this.runtime.buildRequest(
"DetermineNextStep",
{
"thread": thread
},
this.ctxManager.cloneContext(),
__baml_options__?.tb?.__tb(),
__baml_options__?.clientRegistry,
false,
)
} catch (error) {
throw toBamlError(error);
}
}
}
export class AsyncHttpStreamRequest {
constructor(private runtime: BamlRuntime, private ctxManager: BamlCtxManager) {}
async DetermineNextStep(
thread: string,
__baml_options__?: BamlCallOptions
): Promise<HTTPRequest> {
try {
return await this.runtime.buildRequest(
"DetermineNextStep",
{
"thread": thread
},
this.ctxManager.cloneContext(),
__baml_options__?.tb?.__tb(),
__baml_options__?.clientRegistry,
true,
)
} catch (error) {
throw toBamlError(error);
}
}
}


@@ -1,19 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
export { setLogLevel, getLogLevel, setLogJson } from "@boundaryml/baml/logging";
export { resetBamlEnvVars } from "./globals";


@@ -1,67 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
import { BamlRuntime, BamlCtxManager } from '@boundaryml/baml'
import { getBamlFiles } from './inlinedbaml'
export const DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME = BamlRuntime.fromFiles(
'baml_src',
getBamlFiles(),
process.env
)
export const DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX = new BamlCtxManager(DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME)
export function resetBamlEnvVars(envVars: Record<string, string | undefined>) {
if (DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.allowResets()) {
const envVarsToReset = Object.fromEntries(Object.entries(envVars).filter((kv): kv is [string, string] => kv[1] !== undefined));
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME.reset('baml_src', getBamlFiles(), envVarsToReset)
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.reset()
} else {
throw new Error('BamlError: Cannot reset BAML environment variables while there are active BAML contexts.')
}
}
const patchedLoad = (originalFn: any) => (...args: any[]) => {
const result = originalFn(...args);
try {
// Don't fail if env vars fail to reset
resetBamlEnvVars(process.env);
} catch (e) {
console.error(e);
}
return result;
};
try {
const dotenv = require('dotenv');
// Monkeypatch load function to call resetBamlEnvVars after execution
// Apply the patch
dotenv.config = patchedLoad(dotenv.config);
dotenv.configDotenv = patchedLoad(dotenv.configDotenv);
dotenv.populate = patchedLoad(dotenv.populate);
} catch (error) {
// dotenv is not installed, so we do nothing
}
// also patch process.loadEnvFile
if (process.loadEnvFile) {
process.loadEnvFile = patchedLoad(process.loadEnvFile);
}


@@ -1,47 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
/**
* If this import fails, you may need to upgrade @boundaryml/baml.
*
* Please upgrade @boundaryml/baml to 0.88.0.
*
* $ npm install @boundaryml/baml@0.88.0
* $ yarn add @boundaryml/baml@0.88.0
* $ pnpm add @boundaryml/baml@0.88.0
*
* If nothing else works, please ask for help:
*
* https://github.com/boundaryml/baml/issues
* https://boundaryml.com/discord
*
**/
import { ThrowIfVersionMismatch } from "@boundaryml/baml";
export const version = "0.88.0";
ThrowIfVersionMismatch(version);
export { b } from "./async_client"
export * from "./types"
export type { partial_types } from "./partial_types"
export * from "./tracing"
export { resetBamlEnvVars } from "./globals"
export { BamlClientHttpError, BamlValidationError, BamlClientFinishReasonError } from "@boundaryml/baml"


@@ -1,26 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
const fileMap = {
"agent.baml": "class DoneForNow {\n intent \"done_for_now\"\n message string \n}\n\nclient<llm> Qwen3 {\n provider \"openai-generic\"\n options {\n base_url \"https://model-4w7jrl6w.api.baseten.co/environments/production/sync/v1\"\n api_key \"QQksI6f4.mFAZX9D0DevBQMUWs2OV8gcfzMUinN9N\"\n }\n}\n\nfunction DetermineNextStep(\n thread: string \n) -> DoneForNow {\n client Qwen3\n\n prompt #\"\n {{ _.role(\"system\") }}\n /nothink\n\n You are a helpful assistant that can help with tasks.\n\n {{ _.role(\"user\") }}\n\n You are working on the following thread:\n\n {{ thread }}\n\n What should the next step be?\n\n {{ ctx.output_format }}\n \"#\n}\n\ntest HelloWorld {\n functions [DetermineNextStep]\n args {\n thread #\"\n {\n \"type\": \"user_input\",\n \"data\": \"hello!\"\n }\n \"#\n }\n}",
"clients.baml": "// Learn more about clients at https://docs.boundaryml.com/docs/snippets/clients/overview\n\nclient<llm> CustomGPT4o {\n provider openai\n options {\n model \"gpt-4o\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient<llm> CustomGPT4oMini {\n provider openai\n retry_policy Exponential\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient<llm> CustomSonnet {\n provider anthropic\n options {\n model \"claude-3-5-sonnet-20241022\"\n api_key env.ANTHROPIC_API_KEY\n }\n}\n\n\nclient<llm> CustomHaiku {\n provider anthropic\n retry_policy Constant\n options {\n model \"claude-3-haiku-20240307\"\n api_key env.ANTHROPIC_API_KEY\n }\n}\n\n// https://docs.boundaryml.com/docs/snippets/clients/round-robin\nclient<llm> CustomFast {\n provider round-robin\n options {\n // This will alternate between the two clients\n strategy [CustomGPT4oMini, CustomHaiku]\n }\n}\n\n// https://docs.boundaryml.com/docs/snippets/clients/fallback\nclient<llm> OpenaiFallback {\n provider fallback\n options {\n // This will try the clients in order until one succeeds\n strategy [CustomGPT4oMini, CustomGPT4oMini]\n }\n}\n\n// https://docs.boundaryml.com/docs/snippets/clients/retry\nretry_policy Constant {\n max_retries 3\n // Strategy is optional\n strategy {\n type constant_delay\n delay_ms 200\n }\n}\n\nretry_policy Exponential {\n max_retries 2\n // Strategy is optional\n strategy {\n type exponential_backoff\n delay_ms 300\n multiplier 1.5\n max_delay_ms 10000\n }\n}",
"generators.baml": "// This helps use auto generate libraries you can use in the language of\n// your choice. You can have multiple generators if you use multiple languages.\n// Just ensure that the output_dir is different for each generator.\ngenerator target {\n // Valid values: \"python/pydantic\", \"typescript\", \"ruby/sorbet\", \"rest/openapi\"\n output_type \"typescript\"\n\n // Where the generated code will be saved (relative to baml_src/)\n output_dir \"../\"\n\n // The version of the BAML package you have installed (e.g. same version as your baml-py or @boundaryml/baml).\n // The BAML VSCode extension version should also match this version.\n version \"0.88.0\"\n\n // Valid values: \"sync\", \"async\"\n // This controls what `b.FunctionName()` will be (sync or async).\n default_client_mode async\n}\n",
}
export const getBamlFiles = () => {
return fileMap;
}


@@ -1,72 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
import type { BamlRuntime, BamlCtxManager, ClientRegistry, Image, Audio, Collector } from "@boundaryml/baml"
import { toBamlError } from "@boundaryml/baml"
import type { Checked, Check } from "./types"
import type { partial_types } from "./partial_types"
import type * as types from "./types"
import type {DoneForNow} from "./types"
import type TypeBuilder from "./type_builder"
export class LlmResponseParser {
constructor(private runtime: BamlRuntime, private ctxManager: BamlCtxManager) {}
DetermineNextStep(
llmResponse: string,
__baml_options__?: { tb?: TypeBuilder, clientRegistry?: ClientRegistry }
): DoneForNow {
try {
return this.runtime.parseLlmResponse(
"DetermineNextStep",
llmResponse,
false,
this.ctxManager.cloneContext(),
__baml_options__?.tb?.__tb(),
__baml_options__?.clientRegistry,
) as DoneForNow
} catch (error) {
throw toBamlError(error);
}
}
}
export class LlmStreamParser {
constructor(private runtime: BamlRuntime, private ctxManager: BamlCtxManager) {}
DetermineNextStep(
llmResponse: string,
__baml_options__?: { tb?: TypeBuilder, clientRegistry?: ClientRegistry }
): partial_types.DoneForNow {
try {
return this.runtime.parseLlmResponse(
"DetermineNextStep",
llmResponse,
true,
this.ctxManager.cloneContext(),
__baml_options__?.tb?.__tb(),
__baml_options__?.clientRegistry,
) as partial_types.DoneForNow
} catch (error) {
throw toBamlError(error);
}
}
}


@@ -1,42 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
import type { Image, Audio } from "@boundaryml/baml"
import type { Checked, Check } from "./types"
import type { DoneForNow } from "./types"
import type * as types from "./types"
/******************************************************************************
*
* These types are used for streaming, for when an instance of a type
* is still being built up and any of its fields is not yet fully available.
*
******************************************************************************/
export interface StreamState<T> {
value: T
state: "Pending" | "Incomplete" | "Complete"
}
export namespace partial_types {
export interface DoneForNow {
intent: "done_for_now"
message?: (string | null)
}
}


@@ -1,113 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
import type { BamlRuntime, FunctionResult, BamlCtxManager, Image, Audio, ClientRegistry, Collector } from "@boundaryml/baml"
import { toBamlError, type HTTPRequest } from "@boundaryml/baml"
import type { Checked, Check, RecursivePartialNull as MovedRecursivePartialNull } from "./types"
import type * as types from "./types"
import type {DoneForNow} from "./types"
import type TypeBuilder from "./type_builder"
import { HttpRequest, HttpStreamRequest } from "./sync_request"
import { LlmResponseParser, LlmStreamParser } from "./parser"
import { DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX, DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME } from "./globals"
/**
* @deprecated Use RecursivePartialNull from 'baml_client/types' instead.
* Example:
* ```ts
* import { RecursivePartialNull } from './baml_client/types'
* ```
*/
export type RecursivePartialNull<T> = MovedRecursivePartialNull<T>;
type BamlCallOptions = {
tb?: TypeBuilder
clientRegistry?: ClientRegistry
collector?: Collector | Collector[]
}
export class BamlSyncClient {
private httpRequest: HttpRequest
private httpStreamRequest: HttpStreamRequest
private llmResponseParser: LlmResponseParser
private llmStreamParser: LlmStreamParser
private bamlOptions: BamlCallOptions
constructor(private runtime: BamlRuntime, private ctxManager: BamlCtxManager, private bamlOptions?: BamlCallOptions) {
this.httpRequest = new HttpRequest(runtime, ctxManager)
this.httpStreamRequest = new HttpStreamRequest(runtime, ctxManager)
this.llmResponseParser = new LlmResponseParser(runtime, ctxManager)
this.llmStreamParser = new LlmStreamParser(runtime, ctxManager)
this.bamlOptions = bamlOptions || {}
}
withOptions(bamlOptions: BamlCallOptions) {
return new BamlSyncClient(this.runtime, this.ctxManager, bamlOptions)
}
/*
* @deprecated NOT IMPLEMENTED, since streaming must be async. We
* are not providing a sync version here so that we can reserve
* the right to add one in the future.
*/
get stream() {
throw new Error("stream is not available in BamlSyncClient. Use `import { b } from 'baml_client/async_client")
}
get request() {
return this.httpRequest
}
get streamRequest() {
return this.httpStreamRequest
}
get parse() {
return this.llmResponseParser
}
get parseStream() {
return this.llmStreamParser
}
DetermineNextStep(
thread: string,
__baml_options__?: BamlCallOptions
): DoneForNow {
try {
const options = { ...this.bamlOptions, ...(__baml_options__ || {}) }
const collector = options.collector ? (Array.isArray(options.collector) ? options.collector : [options.collector]) : [];
const raw = this.runtime.callFunctionSync(
"DetermineNextStep",
{
"thread": thread
},
this.ctxManager.cloneContext(),
options.tb?.__tb(),
options.clientRegistry,
collector,
)
return raw.parsed(false) as DoneForNow
} catch (error: any) {
throw toBamlError(error);
}
}
}
export const b = new BamlSyncClient(DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME, DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX)


@@ -1,80 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
import type { BamlRuntime, BamlCtxManager, ClientRegistry, Image, Audio } from "@boundaryml/baml"
import { toBamlError, HTTPRequest } from "@boundaryml/baml"
import type { Checked, Check } from "./types"
import type * as types from "./types"
import type {DoneForNow} from "./types"
import type TypeBuilder from "./type_builder"
type BamlCallOptions = {
tb?: TypeBuilder
clientRegistry?: ClientRegistry
}
export class HttpRequest {
constructor(private runtime: BamlRuntime, private ctxManager: BamlCtxManager) {}
DetermineNextStep(
thread: string,
__baml_options__?: BamlCallOptions
): HTTPRequest {
try {
return this.runtime.buildRequestSync(
"DetermineNextStep",
{
"thread": thread
},
this.ctxManager.cloneContext(),
__baml_options__?.tb?.__tb(),
__baml_options__?.clientRegistry,
false,
)
} catch (error) {
throw toBamlError(error);
}
}
}
export class HttpStreamRequest {
constructor(private runtime: BamlRuntime, private ctxManager: BamlCtxManager) {}
DetermineNextStep(
thread: string,
__baml_options__?: BamlCallOptions
): HTTPRequest {
try {
return this.runtime.buildRequestSync(
"DetermineNextStep",
{
"thread": thread
},
this.ctxManager.cloneContext(),
__baml_options__?.tb?.__tb(),
__baml_options__?.clientRegistry,
true,
)
} catch (error) {
throw toBamlError(error);
}
}
}


@@ -1,33 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
import type { BamlLogEvent } from '@boundaryml/baml';
import { DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX } from './globals';
const traceAsync =
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.traceFnAsync.bind(DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX)
const traceSync =
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.traceFnSync.bind(DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX)
const setTags =
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.upsertTags.bind(DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX)
const flush = () => {
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.flush.bind(DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX)()
}
const onLogEvent = (callback: undefined | ((event: BamlLogEvent) => void)) =>
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.onLogEvent(callback)
export { traceAsync, traceSync, setTags, flush, onLogEvent }


@@ -1,106 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
import { FieldType } from '@boundaryml/baml/native'
import { TypeBuilder as _TypeBuilder, EnumBuilder, EnumViewer, ClassBuilder, ClassViewer } from '@boundaryml/baml/type_builder'
import { DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME } from "./globals"
export default class TypeBuilder {
private tb: _TypeBuilder;
DoneForNow: ClassViewer<'DoneForNow', "intent" | "message">;
constructor() {
this.tb = new _TypeBuilder({
classes: new Set([
"DoneForNow",
]),
enums: new Set([
]),
runtime: DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME
});
this.DoneForNow = this.tb.classViewer("DoneForNow", [
"intent","message",
]);
}
__tb() {
return this.tb._tb();
}
string(): FieldType {
return this.tb.string()
}
literalString(value: string): FieldType {
return this.tb.literalString(value)
}
literalInt(value: number): FieldType {
return this.tb.literalInt(value)
}
literalBool(value: boolean): FieldType {
return this.tb.literalBool(value)
}
int(): FieldType {
return this.tb.int()
}
float(): FieldType {
return this.tb.float()
}
bool(): FieldType {
return this.tb.bool()
}
list(type: FieldType): FieldType {
return this.tb.list(type)
}
null(): FieldType {
return this.tb.null()
}
map(key: FieldType, value: FieldType): FieldType {
return this.tb.map(key, value)
}
union(types: FieldType[]): FieldType {
return this.tb.union(types)
}
addClass<Name extends string>(name: Name): ClassBuilder<Name> {
return this.tb.addClass(name);
}
addEnum<Name extends string>(name: Name): EnumBuilder<Name> {
return this.tb.addEnum(name);
}
addBaml(baml: string): void {
this.tb.addBaml(baml);
}
}


@@ -1,53 +0,0 @@
/*************************************************************************************************
Welcome to Baml! To use this generated code, please run one of the following:
$ npm install @boundaryml/baml
$ yarn add @boundaryml/baml
$ pnpm add @boundaryml/baml
*************************************************************************************************/
// This file was generated by BAML: do not edit it. Instead, edit the BAML
// files and re-generate this code.
//
/* eslint-disable */
// tslint:disable
// @ts-nocheck
// biome-ignore format: autogenerated code
import type { Image, Audio } from "@boundaryml/baml"
/**
* Recursively partial type that can be null.
*
* @deprecated Use types from the `partial_types` namespace instead, which provides type-safe partial implementations
* @template T The type to make recursively partial.
*/
export type RecursivePartialNull<T> = T extends object
? { [P in keyof T]?: RecursivePartialNull<T[P]> }
: T | null;
export interface Checked<T,CheckName extends string = string> {
value: T,
checks: Record<CheckName, Check>,
}
export interface Check {
name: string,
expr: string
status: "succeeded" | "failed"
}
export function all_succeeded<CheckName extends string>(checks: Record<CheckName, Check>): boolean {
return get_checks(checks).every(check => check.status === "succeeded")
}
export function get_checks<CheckName extends string>(checks: Record<CheckName, Check>): Check[] {
return Object.values(checks)
}
export interface DoneForNow {
intent: "done_for_now"
message: string
}

File diff suppressed because it is too large.


@@ -127,6 +127,7 @@ sections:
client "openai/gpt-4o"
- text: Set your env vars
command: |
export BASETEN_API_KEY=...
export BASETEN_BASE_URL=...
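A small, hypothetical pre-flight check that mirrors this step (not part of the workshop files; variable names taken from the commands above). It fails fast before the BAML client is built if either Baseten variable is missing:

// illustrative guard, e.g. at the top of the workshop entrypoint
for (const name of ["BASETEN_API_KEY", "BASETEN_BASE_URL"]) {
  if (!process.env[name]) {
    throw new Error(`Missing required environment variable: ${name}`);
  }
}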


@@ -1,51 +0,0 @@
class DoneForNow {
intent "done_for_now"
message string
}
function DetermineNextStep(
thread: string
) -> CalculatorTools | DoneForNow {
client "openai/gpt-4o"
prompt #"
{{ _.role("system") }}
You are a helpful assistant that can help with tasks.
{{ _.role("user") }}
You are working on the following thread:
{{ thread }}
What should the next step be?
{{ ctx.output_format }}
"#
}
test HelloWorld {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "hello!"
}
"#
}
}
test MathOperation {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "can you multiply 3 and 4?"
}
"#
}
}


@@ -1,53 +0,0 @@
class DoneForNow {
intent "done_for_now"
message string
}
function DetermineNextStep(
thread: string
) -> CalculatorTools | DoneForNow {
client "openai/gpt-4o"
prompt #"
{{ _.role("system") }}
You are a helpful assistant that can help with tasks.
{{ _.role("user") }}
You are working on the following thread:
{{ thread }}
What should the next step be?
{{ ctx.output_format }}
"#
}
test HelloWorld {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "hello!"
}
"#
}
@@assert(hello, {{this.intent == "done_for_now"}})
}
test MathOperation {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "can you multiply 3 and 4?"
}
"#
}
@@assert(math_operation, {{this.intent == "multiply"}})
}


@@ -1,105 +0,0 @@
class DoneForNow {
intent "done_for_now"
message string
}
function DetermineNextStep(
thread: string
) -> CalculatorTools | DoneForNow {
client "openai/gpt-4o"
prompt #"
{{ _.role("system") }}
You are a helpful assistant that can help with tasks.
{{ _.role("user") }}
You are working on the following thread:
{{ thread }}
What should the next step be?
{{ ctx.output_format }}
"#
}
test HelloWorld {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "hello!"
}
"#
}
@@assert(intent, {{this.intent == "done_for_now"}})
}
test MathOperation {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "can you multiply 3 and 4?"
}
"#
}
@@assert(intent, {{this.intent == "multiply"}})
}
test LongMath {
functions [DetermineNextStep]
args {
thread #"
[
{
"type": "user_input",
"data": "can you multiply 3 and 4, then divide the result by 2 and then add 12 to that result?"
},
{
"type": "tool_call",
"data": {
"intent": "multiply",
"a": 3,
"b": 4
}
},
{
"type": "tool_response",
"data": 12
},
{
"type": "tool_call",
"data": {
"intent": "divide",
"a": 12,
"b": 2
}
},
{
"type": "tool_response",
"data": 6
},
{
"type": "tool_call",
"data": {
"intent": "add",
"a": 6,
"b": 12
}
},
{
"type": "tool_response",
"data": 18
}
]
"#
}
@@assert(intent, {{this.intent == "done_for_now"}})
@@assert(answer, {{"18" in this.message}})
}


@@ -1,117 +0,0 @@
// human tools are async requests to a human
type HumanTools = ClarificationRequest | DoneForNow
class ClarificationRequest {
intent "request_more_information" @description("you can request more information from me")
message string
}
class DoneForNow {
intent "done_for_now"
message string @description(#"
message to send to the user about the work that was done.
"#)
}
function DetermineNextStep(
thread: string
) -> HumanTools | CalculatorTools {
client "openai/gpt-4o"
prompt #"
{{ _.role("system") }}
You are a helpful assistant that can help with tasks.
{{ _.role("user") }}
You are working on the following thread:
{{ thread }}
What should the next step be?
{{ ctx.output_format }}
"#
}
test HelloWorld {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "hello!"
}
"#
}
@@assert(intent, {{this.intent == "done_for_now"}})
}
test MathOperation {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "can you multiply 3 and 4?"
}
"#
}
@@assert(intent, {{this.intent == "multiply"}})
}
test LongMath {
functions [DetermineNextStep]
args {
thread #"
[
{
"type": "user_input",
"data": "can you multiply 3 and 4, then divide the result by 2 and then add 12 to that result?"
},
{
"type": "tool_call",
"data": {
"intent": "multiply",
"a": 3,
"b": 4
}
},
{
"type": "tool_response",
"data": 12
},
{
"type": "tool_call",
"data": {
"intent": "divide",
"a": 12,
"b": 2
}
},
{
"type": "tool_response",
"data": 6
},
{
"type": "tool_call",
"data": {
"intent": "add",
"a": 6,
"b": 12
}
},
{
"type": "tool_response",
"data": 18
}
]
"#
}
@@assert(intent, {{this.intent == "done_for_now"}})
@@assert(answer, {{"18" in this.message}})
}


@@ -1,87 +0,0 @@
import { AddTool, SubtractTool, DivideTool, MultiplyTool, b } from "../baml_client";
export interface Event {
type: string
data: any;
}
export class Thread {
events: Event[] = [];
constructor(events: Event[]) {
this.events = events;
}
serializeForLLM() {
// can change this to whatever custom serialization you want to do, XML, etc
// e.g. https://github.com/got-agents/agents/blob/59ebbfa236fc376618f16ee08eb0f3bf7b698892/linear-assistant-ts/src/agent.ts#L66-L105
return JSON.stringify(this.events);
}
}
export type CalculatorTool = AddTool | SubtractTool | MultiplyTool | DivideTool;
export async function handleNextStep(nextStep: CalculatorTool, thread: Thread): Promise<Thread> {
let result: number;
switch (nextStep.intent) {
case "add":
result = nextStep.a + nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "subtract":
result = nextStep.a - nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "multiply":
result = nextStep.a * nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "divide":
result = nextStep.a / nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
}
}
export async function agentLoop(thread: Thread): Promise<Thread> {
while (true) {
const nextStep = await b.DetermineNextStep(thread.serializeForLLM());
console.log("nextStep", nextStep);
thread.events.push({
"type": "tool_call",
"data": nextStep
});
switch (nextStep.intent) {
case "done_for_now":
case "request_more_information":
// response to human, return the thread
return thread;
case "add":
case "subtract":
case "multiply":
case "divide":
thread = await handleNextStep(nextStep, thread);
}
}
}


@@ -1,50 +0,0 @@
// cli.ts lets you invoke the agent loop from the command line
import { agentLoop, Thread, Event } from "../src/agent";
export async function cli() {
// Get command line arguments, skipping the first two (node and script name)
const args = process.argv.slice(2);
if (args.length === 0) {
console.error("Error: Please provide a message as a command line argument");
process.exit(1);
}
// Join all arguments into a single message
const message = args.join(" ");
// Create a new thread with the user's message as the initial event
const thread = new Thread([{ type: "user_input", data: message }]);
// Run the agent loop with the thread
const result = await agentLoop(thread);
let lastEvent = result.events.slice(-1)[0];
while (lastEvent.data.intent === "request_more_information") {
const message = await askHuman(lastEvent.data.message);
thread.events.push({ type: "human_response", data: message });
const result = await agentLoop(thread);
lastEvent = result.events.slice(-1)[0];
}
// print the final result
// optional - you could loop here too
console.log(lastEvent.data.message);
process.exit(0);
}
async function askHuman(message: string) {
const readline = require('readline').createInterface({
input: process.stdin,
output: process.stdout
});
return new Promise((resolve) => {
readline.question(`${message}\n> `, (answer: string) => {
readline.close();
resolve(answer);
});
});
}
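cli.ts only exports cli(); a hypothetical entrypoint that wires it up (not shown in this diff; the file name and runner are illustrative, e.g. `npx tsx index.ts "hello"`):

// index.ts (illustrative)
import { cli } from "./cli";

cli().catch((err) => {
  console.error(err);
  process.exit(1);
});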


@@ -1,146 +0,0 @@
// human tools are async requests to a human
type HumanTools = ClarificationRequest | DoneForNow
class ClarificationRequest {
intent "request_more_information" @description("you can request more information from me")
message string
}
class DoneForNow {
intent "done_for_now"
message string @description(#"
message to send to the user about the work that was done.
"#)
}
function DetermineNextStep(
thread: string
) -> HumanTools | CalculatorTools {
client "openai/gpt-4o"
prompt #"
{{ _.role("system") }}
You are a helpful assistant that can help with tasks.
{{ _.role("user") }}
You are working on the following thread:
{{ thread }}
What should the next step be?
{{ ctx.output_format }}
"#
}
test HelloWorld {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "hello!"
}
"#
}
@@assert(intent, {{this.intent == "done_for_now"}})
}
test MathOperation {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "can you multiply 3 and 4?"
}
"#
}
@@assert(intent, {{this.intent == "multiply"}})
}
test LongMath {
functions [DetermineNextStep]
args {
thread #"
[
{
"type": "user_input",
"data": "can you multiply 3 and 4, then divide the result by 2 and then add 12 to that result?"
},
{
"type": "tool_call",
"data": {
"intent": "multiply",
"a": 3,
"b": 4
}
},
{
"type": "tool_response",
"data": 12
},
{
"type": "tool_call",
"data": {
"intent": "divide",
"a": 12,
"b": 2
}
},
{
"type": "tool_response",
"data": 6
},
{
"type": "tool_call",
"data": {
"intent": "add",
"a": 6,
"b": 12
}
},
{
"type": "tool_response",
"data": 18
}
]
"#
}
@@assert(intent, {{this.intent == "done_for_now"}})
@@assert(answer, {{"18" in this.message}})
}
test MathOperationWithClarification {
functions [DetermineNextStep]
args {
thread #"
[{"type":"user_input","data":"can you multiply 3 and feee9ff10"}]
"#
}
@@assert(intent, {{this.intent == "request_more_information"}})
}
test MathOperationPostClarification {
functions [DetermineNextStep]
args {
thread #"
[
{"type":"user_input","data":"can you multiply 3 and FD*(#F&& ?"},
{"type":"tool_call","data":{"intent":"request_more_information","message":"It seems like there was a typo or mistake in your request. Could you please clarify or provide the correct numbers you would like to multiply?"}},
{"type":"human_response","data":"lets try 12 instead"},
]
"#
}
@@assert(intent, {{this.intent == "multiply"}})
@@assert(a, {{this.b == 12}})
@@assert(b, {{this.a == 3}})
}


@@ -1,146 +0,0 @@
// human tools are async requests to a human
type HumanTools = ClarificationRequest | DoneForNow
class ClarificationRequest {
intent "request_more_information" @description("you can request more information from me")
message string
}
class DoneForNow {
intent "done_for_now"
message string @description(#"
message to send to the user about the work that was done.
"#)
}
function DetermineNextStep(
thread: string
) -> HumanTools | CalculatorTools {
client "openai/gpt-4o"
prompt #"
{{ _.role("system") }}
You are a helpful assistant that can help with tasks.
{{ _.role("user") }}
You are working on the following thread:
{{ thread }}
What should the next step be?
{{ ctx.output_format }}
"#
}
test HelloWorld {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "hello!"
}
"#
}
@@assert(intent, {{this.intent == "request_more_information"}})
}
test MathOperation {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "can you multiply 3 and 4?"
}
"#
}
@@assert(intent, {{this.intent == "multiply"}})
}
test LongMath {
functions [DetermineNextStep]
args {
thread #"
[
{
"type": "user_input",
"data": "can you multiply 3 and 4, then divide the result by 2 and then add 12 to that result?"
},
{
"type": "tool_call",
"data": {
"intent": "multiply",
"a": 3,
"b": 4
}
},
{
"type": "tool_response",
"data": 12
},
{
"type": "tool_call",
"data": {
"intent": "divide",
"a": 12,
"b": 2
}
},
{
"type": "tool_response",
"data": 6
},
{
"type": "tool_call",
"data": {
"intent": "add",
"a": 6,
"b": 12
}
},
{
"type": "tool_response",
"data": 18
}
]
"#
}
@@assert(intent, {{this.intent == "done_for_now"}})
@@assert(answer, {{"18" in this.message}})
}
test MathOperationWithClarification {
functions [DetermineNextStep]
args {
thread #"
[{"type":"user_input","data":"can you multiply 3 and feee9ff10"}]
"#
}
@@assert(intent, {{this.intent == "request_more_information"}})
}
test MathOperationPostClarification {
functions [DetermineNextStep]
args {
thread #"
[
{"type":"user_input","data":"can you multiply 3 and FD*(#F&& ?"},
{"type":"tool_call","data":{"intent":"request_more_information","message":"It seems like there was a typo or mistake in your request. Could you please clarify or provide the correct numbers you would like to multiply?"}},
{"type":"human_response","data":"lets try 12 instead"},
]
"#
}
@@assert(intent, {{this.intent == "multiply"}})
@@assert(a, {{this.b == 12}})
@@assert(b, {{this.a == 3}})
}


@@ -1,152 +0,0 @@
// human tools are async requests to a human
type HumanTools = ClarificationRequest | DoneForNow
class ClarificationRequest {
intent "request_more_information" @description("you can request more information from me")
message string
}
class DoneForNow {
intent "done_for_now"
message string @description(#"
message to send to the user about the work that was done.
"#)
}
function DetermineNextStep(
thread: string
) -> HumanTools | CalculatorTools {
client "openai/gpt-4o"
prompt #"
{{ _.role("system") }}
You are a helpful assistant that can help with tasks.
{{ _.role("user") }}
You are working on the following thread:
{{ thread }}
What should the next step be?
{{ ctx.output_format }}
Always think about what to do next first, like:
- ...
- ...
- ...
{...} // schema
"#
}
test HelloWorld {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "hello!"
}
"#
}
@@assert(intent, {{this.intent == "request_more_information"}})
}
test MathOperation {
functions [DetermineNextStep]
args {
thread #"
{
"type": "user_input",
"data": "can you multiply 3 and 4?"
}
"#
}
@@assert(intent, {{this.intent == "multiply"}})
}
test LongMath {
functions [DetermineNextStep]
args {
thread #"
[
{
"type": "user_input",
"data": "can you multiply 3 and 4, then divide the result by 2 and then add 12 to that result?"
},
{
"type": "tool_call",
"data": {
"intent": "multiply",
"a": 3,
"b": 4
}
},
{
"type": "tool_response",
"data": 12
},
{
"type": "tool_call",
"data": {
"intent": "divide",
"a": 12,
"b": 2
}
},
{
"type": "tool_response",
"data": 6
},
{
"type": "tool_call",
"data": {
"intent": "add",
"a": 6,
"b": 12
}
},
{
"type": "tool_response",
"data": 18
}
]
"#
}
@@assert(intent, {{this.intent == "done_for_now"}})
@@assert(answer, {{"18" in this.message}})
}
test MathOperationWithClarification {
functions [DetermineNextStep]
args {
thread #"
[{"type":"user_input","data":"can you multiply 3 and feee9ff10"}]
"#
}
@@assert(intent, {{this.intent == "request_more_information"}})
}
test MathOperationPostClarification {
functions [DetermineNextStep]
args {
thread #"
[
{"type":"user_input","data":"can you multiply 3 and FD*(#F&& ?"},
{"type":"tool_call","data":{"intent":"request_more_information","message":"It seems like there was a typo or mistake in your request. Could you please clarify or provide the correct numbers you would like to multiply?"}},
{"type":"human_response","data":"lets try 12 instead"},
]
"#
}
@@assert(intent, {{this.intent == "multiply"}})
@@assert(a, {{this.b == 12}})
@@assert(b, {{this.a == 3}})
}


@@ -1,87 +0,0 @@
import { AddTool, SubtractTool, DivideTool, MultiplyTool, b } from "../baml_client";
export interface Event {
type: string
data: any;
}
export class Thread {
events: Event[] = [];
constructor(events: Event[]) {
this.events = events;
}
serializeForLLM() {
// can change this to whatever custom serialization you want to do, XML, etc
// e.g. https://github.com/got-agents/agents/blob/59ebbfa236fc376618f16ee08eb0f3bf7b698892/linear-assistant-ts/src/agent.ts#L66-L105
return JSON.stringify(this.events, null, 2);
}
}
export type CalculatorTool = AddTool | SubtractTool | MultiplyTool | DivideTool;
export async function handleNextStep(nextStep: CalculatorTool, thread: Thread): Promise<Thread> {
let result: number;
switch (nextStep.intent) {
case "add":
result = nextStep.a + nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "subtract":
result = nextStep.a - nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "multiply":
result = nextStep.a * nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "divide":
result = nextStep.a / nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
}
}
export async function agentLoop(thread: Thread): Promise<Thread> {
while (true) {
const nextStep = await b.DetermineNextStep(thread.serializeForLLM());
console.log("nextStep", nextStep);
thread.events.push({
"type": "tool_call",
"data": nextStep
});
switch (nextStep.intent) {
case "done_for_now":
case "request_more_information":
// response to human, return the thread
return thread;
case "add":
case "subtract":
case "multiply":
case "divide":
thread = await handleNextStep(nextStep, thread);
}
}
}


@@ -1,99 +0,0 @@
import { AddTool, SubtractTool, DivideTool, MultiplyTool, b } from "../baml_client";
export interface Event {
type: string
data: any;
}
export class Thread {
events: Event[] = [];
constructor(events: Event[]) {
this.events = events;
}
serializeForLLM() {
return this.events.map(e => this.serializeOneEvent(e)).join("\n");
}
trimLeadingWhitespace(s: string) {
return s.replace(/^[ \t]+/gm, '');
}
serializeOneEvent(e: Event) {
return this.trimLeadingWhitespace(`
<${e.data?.intent || e.type}>
${
typeof e.data !== 'object' ? e.data :
Object.keys(e.data).filter(k => k !== 'intent').map(k => `${k}: ${e.data[k]}`).join("\n")}
</${e.data?.intent || e.type}>
`)
}
}
export type CalculatorTool = AddTool | SubtractTool | MultiplyTool | DivideTool;
export async function handleNextStep(nextStep: CalculatorTool, thread: Thread): Promise<Thread> {
let result: number;
switch (nextStep.intent) {
case "add":
result = nextStep.a + nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "subtract":
result = nextStep.a - nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "multiply":
result = nextStep.a * nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "divide":
result = nextStep.a / nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
}
}
export async function agentLoop(thread: Thread): Promise<Thread> {
while (true) {
const nextStep = await b.DetermineNextStep(thread.serializeForLLM());
console.log("nextStep", nextStep);
thread.events.push({
"type": "tool_call",
"data": nextStep
});
switch (nextStep.intent) {
case "done_for_now":
case "request_more_information":
// response to human, return the thread
return thread;
case "add":
case "subtract":
case "multiply":
case "divide":
thread = await handleNextStep(nextStep, thread);
}
}
}
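To make the XML-ish serialization above concrete, a small sketch (not in the workshop repo; the import path is assumed) that prints what the prompt would receive:

import { Thread } from "../src/agent";

const t = new Thread([
  { type: "user_input", data: "can you multiply 3 and 4?" },
  { type: "tool_call", data: { intent: "multiply", a: 3, b: 4 } },
]);
// Prints, roughly (modulo blank lines from the template):
// <user_input>
// can you multiply 3 and 4?
// </user_input>
// <multiply>
// a: 3
// b: 4
// </multiply>
console.log(t.serializeForLLM());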


@@ -1,153 +0,0 @@
// human tools are async requests to a human
type HumanTools = ClarificationRequest | DoneForNow
class ClarificationRequest {
intent "request_more_information" @description("you can request more information from me")
message string
}
class DoneForNow {
intent "done_for_now"
message string @description(#"
message to send to the user about the work that was done.
"#)
}
function DetermineNextStep(
thread: string
) -> HumanTools | CalculatorTools {
client "openai/gpt-4o"
prompt #"
{{ _.role("system") }}
You are a helpful assistant that can help with tasks.
{{ _.role("user") }}
You are working on the following thread:
{{ thread }}
What should the next step be?
{{ ctx.output_format }}
Always think about what to do next first, like:
- ...
- ...
- ...
{...} // schema
"#
}
test HelloWorld {
functions [DetermineNextStep]
args {
thread #"
<user_input>
hello!
</user_input>
"#
}
@@assert(intent, {{this.intent == "request_more_information"}})
}
test MathOperation {
functions [DetermineNextStep]
args {
thread #"
<user_input>
can you multiply 3 and 4?
</user_input>
"#
}
@@assert(intent, {{this.intent == "multiply"}})
}
test LongMath {
functions [DetermineNextStep]
args {
thread #"
<user_input>
can you multiply 3 and 4, then divide the result by 2 and then add 12 to that result?
</user_input>
<multiply>
a: 3
b: 4
</multiply>
<tool_response>
12
</tool_response>
<divide>
a: 12
b: 2
</divide>
<tool_response>
6
</tool_response>
<add>
a: 6
b: 12
</add>
<tool_response>
18
</tool_response>
"#
}
@@assert(intent, {{this.intent == "done_for_now"}})
@@assert(answer, {{"18" in this.message}})
}
test MathOperationWithClarification {
functions [DetermineNextStep]
args {
thread #"
<user_input>
can you multiply 3 and fe1iiaff10
</user_input>
"#
}
@@assert(intent, {{this.intent == "request_more_information"}})
}
test MathOperationPostClarification {
functions [DetermineNextStep]
args {
thread #"
<user_input>
can you multiply 3 and FD*(#F&& ?
</user_input>
<request_more_information>
message: It seems like there was a typo or mistake in your request. Could you please clarify or provide the correct numbers you would like to multiply?
</request_more_information>
<human_response>
lets try 12 instead
</human_response>
"#
}
@@assert(intent, {{this.intent == "multiply"}})
@@assert(b, {{this.a == 3}})
@@assert(a, {{this.b == 12}})
}


@@ -1,28 +0,0 @@
import express from 'express';
import { Thread, agentLoop } from '../src/agent';
const app = express();
app.use(express.json());
// POST /thread - Start new thread
app.post('/thread', async (req, res) => {
const thread = new Thread([{
type: "user_input",
data: req.body.message
}]);
const result = await agentLoop(thread);
res.json(result);
});
// GET /thread/:id - Get thread status
app.get('/thread/:id', (req, res) => {
// optional - add state
res.status(404).json({ error: "Not implemented yet" });
});
const port = process.env.PORT || 3000;
app.listen(port, () => {
console.log(`Server running on port ${port}`);
});
export { app };


@@ -1,71 +0,0 @@
import express from 'express';
import { Thread, agentLoop } from '../src/agent';
import { ThreadStore } from '../src/state';
const app = express();
app.use(express.json());
const store = new ThreadStore();
// POST /thread - Start new thread
app.post('/thread', async (req, res) => {
const thread = new Thread([{
type: "user_input",
data: req.body.message
}]);
const threadId = store.create(thread);
const result = await agentLoop(thread);
// If clarification is needed, include the response URL
const lastEvent = result.events[result.events.length - 1];
if (lastEvent.data.intent === 'request_more_information') {
lastEvent.data.response_url = `/thread/${threadId}/response`;
}
store.update(threadId, result);
res.json({
thread_id: threadId,
...result
});
});
// GET /thread/:id - Get thread status
app.get('/thread/:id', (req, res) => {
const thread = store.get(req.params.id);
if (!thread) {
return res.status(404).json({ error: "Thread not found" });
}
res.json(thread);
});
// POST /thread/:id/response - Handle clarification response
app.post('/thread/:id/response', async (req, res) => {
const thread = store.get(req.params.id);
if (!thread) {
return res.status(404).json({ error: "Thread not found" });
}
thread.events.push({
type: "human_response",
data: req.body.message
});
const result = await agentLoop(thread);
// If another clarification is needed, include the response URL
const lastEvent = result.events[result.events.length - 1];
if (lastEvent.data.intent === 'request_more_information') {
lastEvent.data.response_url = `/thread/${req.params.id}/response`;
}
store.update(req.params.id, result);
res.json(result);
});
const port = process.env.PORT || 3000;
app.listen(port, () => {
console.log(`Server running on port ${port}`);
});
export { app };


@@ -1,23 +0,0 @@
import crypto from 'crypto';
import { Thread } from '../src/agent';
// you can replace this with any simple state management,
// e.g. redis, sqlite, postgres, etc
export class ThreadStore {
private threads: Map<string, Thread> = new Map();
create(thread: Thread): string {
const id = crypto.randomUUID();
this.threads.set(id, thread);
return id;
}
get(id: string): Thread | undefined {
return this.threads.get(id);
}
update(id: string, thread: Thread): void {
this.threads.set(id, thread);
}
}
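As the comment above suggests, the in-memory Map can be swapped for real persistence without changing the interface. A minimal file-backed variant, as a sketch (uses only Node built-ins; the class name and storage directory are illustrative, not part of the workshop):

import fs from 'fs';
import path from 'path';
import crypto from 'crypto';
import { Thread } from '../src/agent';

export class FileThreadStore {
  constructor(private dir: string = '.threads') {
    fs.mkdirSync(this.dir, { recursive: true });
  }
  private file(id: string): string {
    return path.join(this.dir, `${id}.json`);
  }
  create(thread: Thread): string {
    const id = crypto.randomUUID();
    fs.writeFileSync(this.file(id), JSON.stringify(thread.events));
    return id;
  }
  get(id: string): Thread | undefined {
    if (!fs.existsSync(this.file(id))) return undefined;
    return new Thread(JSON.parse(fs.readFileSync(this.file(id), 'utf8')));
  }
  update(id: string, thread: Thread): void {
    fs.writeFileSync(this.file(id), JSON.stringify(thread.events));
  }
}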


@@ -1,111 +0,0 @@
import { AddTool, SubtractTool, DivideTool, MultiplyTool, b } from "../baml_client";
export interface Event {
type: string
data: any;
}
export class Thread {
events: Event[] = [];
constructor(events: Event[]) {
this.events = events;
}
serializeForLLM() {
return this.events.map(e => this.serializeOneEvent(e)).join("\n");
}
trimLeadingWhitespace(s: string) {
return s.replace(/^[ \t]+/gm, '');
}
serializeOneEvent(e: Event) {
return this.trimLeadingWhitespace(`
<${e.data?.intent || e.type}>
${
typeof e.data !== 'object' ? e.data :
Object.keys(e.data).filter(k => k !== 'intent').map(k => `${k}: ${e.data[k]}`).join("\n")}
</${e.data?.intent || e.type}>
`)
}
awaitingHumanResponse(): boolean {
const lastEvent = this.events[this.events.length - 1];
return ['request_more_information', 'done_for_now'].includes(lastEvent.data.intent);
}
awaitingHumanApproval(): boolean {
const lastEvent = this.events[this.events.length - 1];
return lastEvent.data.intent === 'divide';
}
}
export type CalculatorTool = AddTool | SubtractTool | MultiplyTool | DivideTool;
export async function handleNextStep(nextStep: CalculatorTool, thread: Thread): Promise<Thread> {
let result: number;
switch (nextStep.intent) {
case "add":
result = nextStep.a + nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "subtract":
result = nextStep.a - nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "multiply":
result = nextStep.a * nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "divide":
result = nextStep.a / nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
}
}
export async function agentLoop(thread: Thread): Promise<Thread> {
while (true) {
const nextStep = await b.DetermineNextStep(thread.serializeForLLM());
console.log("nextStep", nextStep);
thread.events.push({
"type": "tool_call",
"data": nextStep
});
switch (nextStep.intent) {
case "done_for_now":
case "request_more_information":
// response to human, return the thread
return thread;
case "divide":
// divide is scary, return it for human approval
return thread;
case "add":
case "subtract":
case "multiply":
thread = await handleNextStep(nextStep, thread);
}
}
}


@@ -1,111 +0,0 @@
import express from 'express';
import { Thread, agentLoop, handleNextStep } from '../src/agent';
import { ThreadStore } from '../src/state';
const app = express();
app.use(express.json());
const store = new ThreadStore();
// POST /thread - Start new thread
app.post('/thread', async (req, res) => {
const thread = new Thread([{
type: "user_input",
data: req.body.message
}]);
const threadId = store.create(thread);
const newThread = await agentLoop(thread);
store.update(threadId, newThread);
const lastEvent = newThread.events[newThread.events.length - 1];
// If we exited the loop, include the response URL so the client can
// push a new message onto the thread
lastEvent.data.response_url = `/thread/${threadId}/response`;
console.log("returning last event from endpoint", lastEvent);
res.json({
thread_id: threadId,
...newThread
});
});
// GET /thread/:id - Get thread status
app.get('/thread/:id', (req, res) => {
const thread = store.get(req.params.id);
if (!thread) {
return res.status(404).json({ error: "Thread not found" });
}
res.json(thread);
});
type ApprovalPayload = {
type: "approval";
approved: boolean;
comment?: string;
}
type ResponsePayload = {
type: "response";
response: string;
}
type Payload = ApprovalPayload | ResponsePayload;
// POST /thread/:id/response - Handle clarification response
app.post('/thread/:id/response', async (req, res) => {
let thread = store.get(req.params.id);
if (!thread) {
return res.status(404).json({ error: "Thread not found" });
}
const body: Payload = req.body;
let lastEvent = thread.events[thread.events.length - 1];
if (thread.awaitingHumanResponse() && body.type === 'response') {
thread.events.push({
type: "human_response",
data: body.response
});
} else if (thread.awaitingHumanApproval() && body.type === 'approval' && !body.approved) {
// push feedback onto the thread
thread.events.push({
type: "tool_response",
data: `user denied the operation with feedback: "${body.comment}"`
});
} else if (thread.awaitingHumanApproval() && body.type === 'approval' && body.approved) {
// approved, run the tool, pushing results onto the thread
await handleNextStep(lastEvent.data, thread);
} else {
res.status(400).json({
error: "Invalid request: " + body.type,
awaitingHumanResponse: thread.awaitingHumanResponse(),
awaitingHumanApproval: thread.awaitingHumanApproval()
});
return;
}
// loop until stop event
const result = await agentLoop(thread);
store.update(req.params.id, result);
lastEvent = result.events[result.events.length - 1];
lastEvent.data.response_url = `/thread/${req.params.id}/response`;
console.log("returning last event from endpoint", lastEvent);
res.json(result);
});
const port = process.env.PORT || 3000;
app.listen(port, () => {
console.log(`Server running on port ${port}`);
});
export { app };
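
A minimal sketch of exercising these endpoints from a TypeScript client, assuming the server is running locally on port 3000 and Node 18+ so the global fetch is available; the example message and the clarification reply are hypothetical.

const base = "http://localhost:3000";
async function demo() {
  // start a new thread; the response includes thread_id plus the thread's events
  const started = await fetch(`${base}/thread`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ message: "can you multiply 3 and 4?" }),
  }).then(r => r.json());
  console.log("thread", started.thread_id, started.events);
  // if the agent asked for clarification, reply via the response_url it returned
  const lastEvent = started.events[started.events.length - 1];
  if (lastEvent.data.intent === "request_more_information") {
    const updated = await fetch(`${base}${lastEvent.data.response_url}`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ type: "response", response: "multiply 3 and 4" }),
    }).then(r => r.json());
    console.log("updated", updated.events);
  }
}
demo().catch(console.error);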

View File

@@ -1,111 +0,0 @@
import { AddTool, SubtractTool, DivideTool, MultiplyTool, b } from "../baml_client";
export interface Event {
type: string
data: any;
}
export class Thread {
events: Event[] = [];
constructor(events: Event[]) {
this.events = events;
}
serializeForLLM() {
return this.events.map(e => this.serializeOneEvent(e)).join("\n");
}
trimLeadingWhitespace(s: string) {
return s.replace(/^[ \t]+/gm, '');
}
serializeOneEvent(e: Event) {
return this.trimLeadingWhitespace(`
<${e.data?.intent || e.type}>
${
typeof e.data !== 'object' ? e.data :
Object.keys(e.data).filter(k => k !== 'intent').map(k => `${k}: ${e.data[k]}`).join("\n")}
</${e.data?.intent || e.type}>
`)
}
awaitingHumanResponse(): boolean {
const lastEvent = this.events[this.events.length - 1];
return ['request_more_information', 'done_for_now'].includes(lastEvent.data.intent);
}
awaitingHumanApproval(): boolean {
const lastEvent = this.events[this.events.length - 1];
return lastEvent.data.intent === 'divide';
}
}
export type CalculatorTool = AddTool | SubtractTool | MultiplyTool | DivideTool;
export async function handleNextStep(nextStep: CalculatorTool, thread: Thread): Promise<Thread> {
let result: number;
switch (nextStep.intent) {
case "add":
result = nextStep.a + nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "subtract":
result = nextStep.a - nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "multiply":
result = nextStep.a * nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
case "divide":
result = nextStep.a / nextStep.b;
console.log("tool_response", result);
thread.events.push({
"type": "tool_response",
"data": result
});
return thread;
}
}
export async function agentLoop(thread: Thread): Promise<Thread> {
while (true) {
const nextStep = await b.DetermineNextStep(thread.serializeForLLM());
console.log("nextStep", nextStep);
thread.events.push({
"type": "tool_call",
"data": nextStep
});
switch (nextStep.intent) {
case "done_for_now":
case "request_more_information":
// response to human, return the thread
return thread;
case "divide":
// divide is scary, return it for human approval
return thread;
case "add":
case "subtract":
case "multiply":
thread = await handleNextStep(nextStep, thread);
}
}
}

View File

@@ -1,120 +0,0 @@
// cli.ts lets you invoke the agent loop from the command line
import { humanlayer } from "humanlayer";
import { agentLoop, Thread, Event } from "../src/agent";
export async function cli() {
// Get command line arguments, skipping the first two (node and script name)
const args = process.argv.slice(2);
if (args.length === 0) {
console.error("Error: Please provide a message as a command line argument");
process.exit(1);
}
// Join all arguments into a single message
const message = args.join(" ");
// Create a new thread with the user's message as the initial event
const thread = new Thread([{ type: "user_input", data: message }]);
// Run the agent loop with the thread
let newThread = await agentLoop(thread);
let lastEvent = newThread.events.slice(-1)[0];
let needsResponse =
newThread.awaitingHumanResponse() ||
newThread.awaitingHumanApproval();
while (needsResponse) {
lastEvent = newThread.events.slice(-1)[0];
const responseEvent = await askHuman(lastEvent);
thread.events.push(responseEvent);
newThread = await agentLoop(thread);
// determine if we should loop or if we're done
needsResponse = newThread.awaitingHumanResponse()
|| newThread.awaitingHumanApproval();
}
// print the final result
// optional - you could loop here too
console.log(lastEvent.data.message);
process.exit(0);
}
async function askHuman(lastEvent: Event): Promise<Event> {
if (process.env.HUMANLAYER_API_KEY) {
return await askHumanEmail(lastEvent);
} else {
return await askHumanCLI(lastEvent.data.message);
}
}
async function askHumanCLI(message: string): Promise<Event> {
const readline = require('readline').createInterface({
input: process.stdin,
output: process.stdout
});
return new Promise((resolve) => {
readline.question(`${message}\n> `, (answer: string) => {
resolve({ type: "human_response", data: answer });
});
});
}
export async function askHumanEmail(lastEvent: Event): Promise<Event> {
if (!process.env.HUMANLAYER_EMAIL) {
throw new Error("missing or invalid parameters: HUMANLAYER_EMAIL");
}
const hl = humanlayer({ //reads apiKey from env
// name of this agent
runId: "cli-agent",
contactChannel: {
// agent should request permission via email
email: {
address: process.env.HUMANLAYER_EMAIL,
}
}
})
if (lastEvent.data.intent === "request_more_information") {
const response = await hl.fetchHumanResponse({
spec: {
msg: lastEvent.data.message
}
})
return {
"type": "tool_response",
"data": response
}
}
if (lastEvent.data.intent === "divide") {
// fetch approval synchronously
const response = await hl.fetchHumanApproval({
spec: {
fn: "divide",
kwargs: {
a: lastEvent.data.a,
b: lastEvent.data.b
}
}
})
if (response.approved) {
const result = lastEvent.data.a / lastEvent.data.b;
console.log("tool_response", result);
return {
"type": "tool_response",
"data": result
};
} else {
return {
"type": "tool_response",
"data": `user denied operation ${lastEvent.data.intent}`
};
}
}
throw new Error(`unknown tool: ${lastEvent.data.intent}`)
}

Binary file not shown (deleted image, 162 KiB).

View File

@@ -1,128 +0,0 @@
// cli.ts lets you invoke the agent loop from the command line
import { humanlayer } from "humanlayer";
import { agentLoop, Thread, Event } from "../src/agent";
export async function cli() {
// Get command line arguments, skipping the first two (node and script name)
const args = process.argv.slice(2);
if (args.length === 0) {
console.error("Error: Please provide a message as a command line argument");
process.exit(1);
}
// Join all arguments into a single message
const message = args.join(" ");
// Create a new thread with the user's message as the initial event
const thread = new Thread([{ type: "user_input", data: message }]);
// Run the agent loop with the thread
let newThread = await agentLoop(thread);
let lastEvent = newThread.events.slice(-1)[0];
let needsResponse =
newThread.awaitingHumanResponse() ||
newThread.awaitingHumanApproval();
while (needsResponse) {
lastEvent = newThread.events.slice(-1)[0];
const responseEvent = await askHuman(lastEvent);
thread.events.push(responseEvent);
newThread = await agentLoop(thread);
// determine if we should loop or if we're done
needsResponse = newThread.awaitingHumanResponse()
|| newThread.awaitingHumanApproval();
}
// print the final result
// optional - you could loop here too
console.log(lastEvent.data.message);
process.exit(0);
}
async function askHuman(lastEvent: Event): Promise<Event> {
if (process.env.HUMANLAYER_API_KEY) {
return await askHumanEmail(lastEvent);
} else {
return await askHumanCLI(lastEvent.data.message);
}
}
async function askHumanCLI(message: string): Promise<Event> {
const readline = require('readline').createInterface({
input: process.stdin,
output: process.stdout
});
return new Promise((resolve) => {
readline.question(`${message}\n> `, (answer: string) => {
resolve({ type: "human_response", data: answer });
});
});
}
async function askHumanEmail(lastEvent: Event): Promise<Event> {
if (!process.env.HUMANLAYER_EMAIL) {
throw new Error("missing or invalid parameters: HUMANLAYER_EMAIL");
}
const hl = humanlayer({ //reads apiKey from env
// name of this agent
runId: "cli-agent",
contactChannel: {
// agent should request permission via email
email: {
address: process.env.HUMANLAYER_EMAIL,
// custom email body - jinja
template: `
agent {{ event.run_id }} is requesting approval for {{event.spec.fn}}
with args: {{event.spec.kwargs}}
<br><br>
reply to this email to approve
`
}
}
})
if (lastEvent.data.intent === "request_more_information") {
const response = await hl.fetchHumanResponse({
spec: {
msg: lastEvent.data.message
}
})
return {
"type": "tool_response",
"data": response
}
}
if (lastEvent.data.intent === "divide") {
// fetch approval synchronously
const response = await hl.fetchHumanApproval({
spec: {
fn: "divide",
kwargs: {
a: lastEvent.data.a,
b: lastEvent.data.b
}
}
})
if (response.approved) {
const result = lastEvent.data.a / lastEvent.data.b;
console.log("tool_response", result);
return {
"type": "tool_response",
"data": result
};
} else {
return {
"type": "tool_response",
"data": `user denied operation ${lastEvent.data.intent}
with feedback: ${response.comment}`
};
}
}
throw new Error(`unknown tool: ${lastEvent.data.intent}`)
}

View File

@@ -1,140 +0,0 @@
import express from 'express';
import { Thread, agentLoop, handleNextStep } from '../src/agent';
import { ThreadStore } from '../src/state';
import { V1Beta2EmailEventReceived } from 'humanlayer';
const app = express();
app.use(express.json());
const store = new ThreadStore();
// POST /thread - Start new thread
app.post('/thread', async (req, res) => {
const thread = new Thread([{
type: "user_input",
data: req.body.message
}]);
const threadId = store.create(thread);
const newThread = await agentLoop(thread);
store.update(threadId, newThread);
const lastEvent = newThread.events[newThread.events.length - 1];
// If we exited the loop, include the response URL so the client can
// push a new message onto the thread
lastEvent.data.response_url = `/thread/${threadId}/response`;
res.json({
thread_id: threadId,
...newThread
});
});
// GET /thread/:id - Get thread status
app.get('/thread/:id', (req, res) => {
const thread = store.get(req.params.id);
if (!thread) {
return res.status(404).json({ error: "Thread not found" });
}
res.json(thread);
});
type ApprovalPayload = {
type: "approval";
approved: boolean;
comment?: string;
}
type ResponsePayload = {
type: "response";
response: string;
}
type Payload = ApprovalPayload | ResponsePayload;
// POST /thread/:id/response - Handle clarification response
app.post('/thread/:id/response', async (req, res) => {
const thread = store.get(req.params.id);
if (!thread) {
return res.status(404).json({ error: "Thread not found" });
}
const body: Payload = req.body;
let lastEvent = thread.events[thread.events.length - 1];
if (thread.awaitingHumanResponse() && body.type === 'response') {
thread.events.push({
type: "human_response",
data: body.response
});
} else if (thread.awaitingHumanApproval() && body.type === 'approval' && !body.approved) {
// push feedback onto the thread
thread.events.push({
type: "tool_response",
data: `user denied the operation with feedback: "${body.comment}"`
});
} else if (thread.awaitingHumanApproval() && body.type === 'approval' && body.approved) {
// approved, run the tool, pushing results onto the thread
await handleNextStep(lastEvent.data, thread);
} else {
res.status(400).json({
error: "Invalid request: " + body.type,
awaitingHumanResponse: thread.awaitingHumanResponse(),
awaitingHumanApproval: thread.awaitingHumanApproval()
});
return;
}
// loop until stop event
const result = await agentLoop(thread);
store.update(req.params.id, result);
lastEvent = result.events[result.events.length - 1];
lastEvent.data.response_url = `/thread/${req.params.id}/response`;
res.json(result);
});
app.post('/webhook', async (req, res) => {
//todo verify webhook (a generic HMAC verification sketch follows this file)
const payload: V1Beta2EmailEventReceived = req.body
const { subject, body, to_address, from_address} = payload.event;
const thread = new Thread([{
type: "user_input",
data: {
subject,
body,
to_address,
from_address,
}
}]);
const threadId = store.create(thread);
const newThread = await agentLoop(thread);
store.update(threadId, newThread);
const lastEvent = newThread.events[newThread.events.length - 1];
// don't return any content, we sent the next step to a human
res.json({ status: "ok" });
})
const port = process.env.PORT || 3000;
app.listen(port, () => {
console.log(`Server running on port ${port}`);
});
export { app };
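
The //todo above leaves webhook verification open. Below is a minimal, generic sketch of an HMAC check, assuming a hypothetical X-Webhook-Signature header carrying a hex-encoded HMAC-SHA256 of the body and a shared WEBHOOK_SECRET; the actual humanlayer signing scheme may differ, so treat the header name and encoding as placeholders and consult the humanlayer docs.

import crypto from 'crypto';
import express from 'express';
// hypothetical verification middleware; header name, encoding, and secret are assumptions,
// and a production check should HMAC the raw request body rather than the re-serialized JSON
export function verifyWebhook(req: express.Request, res: express.Response, next: express.NextFunction) {
  const secret = process.env.WEBHOOK_SECRET;
  const signature = req.header('X-Webhook-Signature');
  if (!secret || !signature) {
    return res.status(401).json({ error: 'missing webhook signature or secret' });
  }
  const expected = crypto
    .createHmac('sha256', secret)
    .update(JSON.stringify(req.body))
    .digest('hex');
  const ok = expected.length === signature.length &&
    crypto.timingSafeEqual(Buffer.from(expected), Buffer.from(signature));
  if (!ok) {
    return res.status(401).json({ error: 'invalid webhook signature' });
  }
  next();
}
// usage: app.post('/webhook', verifyWebhook, async (req, res) => { ... });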