mirror of
https://github.com/humanlayer/12-factor-agents.git
synced 2025-08-20 18:59:53 +03:00
remove em
This commit is contained in:
@@ -1,49 +0,0 @@
|
||||
// Terminal step: the model signals it has nothing further to do this turn
// and hands a message back to the human.
class DoneForNow {
  // Literal discriminator emitted by the LLM so the runtime can route the step.
  intent "done_for_now"
  // Free-form text to surface to the user.
  message string
}
|
||||
|
||||
// Qwen3 served behind a Baseten OpenAI-compatible endpoint.
// "openai-generic" means any server speaking the OpenAI chat API.
client<llm> Qwen3 {
  provider "openai-generic"
  options {
    // Baseten "sync" deployment URL exposing an OpenAI-style /v1 API.
    base_url "https://model-4w7jrl6w.api.baseten.co/environments/production/sync/v1"
    // Read from the environment — never hard-code credentials here.
    api_key env.BASETEN_API_KEY
  }
}
|
||||
|
||||
// Decide the agent's next step for a serialized conversation thread.
// Currently the only reachable step is DoneForNow; the return type will
// widen to a union as more tools are added.
function DetermineNextStep(
    // JSON-serialized event history (see Thread.serializeForLLM in agent.ts).
    thread: string
) -> DoneForNow {
    client Qwen3

    // use /nothink for now because the thinking tokens (or streaming thereof) screw with baml (i think (no pun intended))
    prompt #"
        {{ _.role("system") }}

        /nothink

        You are a helpful assistant that can help with tasks.

        {{ _.role("user") }}

        You are working on the following thread:

        {{ thread }}

        What should the next step be?

        {{ ctx.output_format }}
    "#
}
|
||||
|
||||
// Smoke test: a thread containing a single user_input event should parse
// into a valid DoneForNow (run from the BAML playground / CLI).
test HelloWorld {
  functions [DetermineNextStep]
  args {
    thread #"
      {
        "type": "user_input",
        "data": "hello!"
      }
    "#
  }
}
|
||||
@@ -1,75 +0,0 @@
|
||||
// Learn more about clients at https://docs.boundaryml.com/docs/snippets/clients/overview

// Plain GPT-4o client with no retry policy.
client<llm> CustomGPT4o {
  provider openai
  options {
    model "gpt-4o"
    api_key env.OPENAI_API_KEY
  }
}
|
||||
|
||||
// Cheaper GPT-4o-mini client; retries with exponential backoff
// (see the Exponential retry_policy defined later in this file).
client<llm> CustomGPT4oMini {
  provider openai
  retry_policy Exponential
  options {
    model "gpt-4o-mini"
    api_key env.OPENAI_API_KEY
  }
}
|
||||
|
||||
// Anthropic Claude 3.5 Sonnet client (pinned snapshot, no retries).
client<llm> CustomSonnet {
  provider anthropic
  options {
    model "claude-3-5-sonnet-20241022"
    api_key env.ANTHROPIC_API_KEY
  }
}
|
||||
|
||||
|
||||
// Anthropic Claude 3 Haiku client; retries on a fixed delay
// (see the Constant retry_policy defined later in this file).
client<llm> CustomHaiku {
  provider anthropic
  retry_policy Constant
  options {
    model "claude-3-haiku-20240307"
    api_key env.ANTHROPIC_API_KEY
  }
}
|
||||
|
||||
// https://docs.boundaryml.com/docs/snippets/clients/round-robin
// Load-spreading composite: requests alternate across two cheap models.
client<llm> CustomFast {
  provider round-robin
  options {
    // This will alternate between the two clients
    strategy [CustomGPT4oMini, CustomHaiku]
  }
}
|
||||
|
||||
// https://docs.boundaryml.com/docs/snippets/clients/fallback
client<llm> OpenaiFallback {
  provider fallback
  options {
    // This will try the clients in order until one succeeds
    // NOTE(review): both entries are CustomGPT4oMini, so this "fallback"
    // effectively retries the same client once (this matches the BAML
    // starter template) — confirm whether a second, distinct client
    // (e.g. CustomGPT4o) was intended.
    strategy [CustomGPT4oMini, CustomGPT4oMini]
  }
}
|
||||
|
||||
// https://docs.boundaryml.com/docs/snippets/clients/retry
// Fixed-delay retries: up to 3 attempts, 200 ms apart.
retry_policy Constant {
  max_retries 3
  // Strategy is optional
  strategy {
    type constant_delay
    delay_ms 200
  }
}
|
||||
|
||||
// Exponential backoff: up to 2 retries starting at 300 ms,
// growing 1.5x per attempt, capped at 10 s between attempts.
retry_policy Exponential {
  max_retries 2
  // Strategy is optional
  strategy {
    type exponential_backoff
    delay_ms 300
    multiplier 1.5
    max_delay_ms 10000
  }
}
|
||||
@@ -1,18 +0,0 @@
|
||||
// This helps auto-generate client libraries you can use in the language of
// your choice. You can have multiple generators if you use multiple languages.
// Just ensure that the output_dir is different for each generator.
generator target {
    // Valid values: "python/pydantic", "typescript", "ruby/sorbet", "rest/openapi"
    output_type "typescript"

    // Where the generated code will be saved (relative to baml_src/)
    output_dir "../"

    // The version of the BAML package you have installed (e.g. same version as your baml-py or @boundaryml/baml).
    // The BAML VSCode extension version should also match this version.
    version "0.88.0"

    // Valid values: "sync", "async"
    // This controls what `b.FunctionName()` will be (sync or async).
    default_client_mode async
}
|
||||
@@ -1,32 +0,0 @@
|
||||
import { b } from "../baml_client";
|
||||
|
||||
// The result of one LLM turn, derived from the generated BAML client so it
// stays in sync with DetermineNextStep's return type — currently either a
// tool call or a respond-to-human step.
type AgentResponse = Awaited<ReturnType<typeof b.DetermineNextStep>>;
|
||||
|
||||
export interface Event {
|
||||
type: string
|
||||
data: any;
|
||||
}
|
||||
|
||||
export class Thread {
|
||||
events: Event[] = [];
|
||||
|
||||
constructor(events: Event[]) {
|
||||
this.events = events;
|
||||
}
|
||||
|
||||
serializeForLLM() {
|
||||
// can change this to whatever custom serialization you want to do, XML, etc
|
||||
// e.g. https://github.com/got-agents/agents/blob/59ebbfa236fc376618f16ee08eb0f3bf7b698892/linear-assistant-ts/src/agent.ts#L66-L105
|
||||
return JSON.stringify(this.events);
|
||||
}
|
||||
}
|
||||
|
||||
// right now this just runs one turn with the LLM, but
|
||||
// we'll update this function to handle all the agent logic
|
||||
export async function agentLoop(thread: Thread): Promise<AgentResponse> {
|
||||
const nextStep = await b.DetermineNextStep(thread.serializeForLLM());
|
||||
return nextStep;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
// cli.ts lets you invoke the agent loop from the command line
|
||||
|
||||
import { agentLoop, Thread, Event } from "./agent";
|
||||
|
||||
export async function cli() {
|
||||
// Get command line arguments, skipping the first two (node and script name)
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
if (args.length === 0) {
|
||||
console.error("Error: Please provide a message as a command line argument");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Join all arguments into a single message
|
||||
const message = args.join(" ");
|
||||
|
||||
// Create a new thread with the user's message as the initial event
|
||||
const thread = new Thread([{ type: "user_input", data: message }]);
|
||||
|
||||
// Run the agent loop with the thread
|
||||
const result = await agentLoop(thread);
|
||||
console.log(result);
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
import { cli } from "./cli"
|
||||
|
||||
async function hello(): Promise<void> {
|
||||
console.log('hello, world!')
|
||||
}
|
||||
|
||||
async function main() {
|
||||
await cli()
|
||||
}
|
||||
|
||||
main().catch(console.error)
|
||||
Reference in New Issue
Block a user