feat: Add Vercel AI SDK integration and improve timeout handling

**What I did**
- Added Vercel AI SDK integration for HumanLayer
- Improved HTTP timeout handling in Python and TypeScript SDKs
- Updated examples and documentation

**How I did it**
- Created new humanlayer-ts-vercel-ai-sdk package
- Added configurable timeout to Python and TypeScript SDKs
- Updated examples to use latest SDK versions
- Added new examples for Vercel AI SDK integration

**How to verify it**
- Run the examples in the ts_vercel_ai_sdk directory
- Check the updated timeout handling in the Python and TypeScript SDKs (see the sketch below)
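
One way to spot-check the TypeScript side of the timeout change (a minimal sketch: `httpTimeoutSeconds` and `HUMANLAYER_HTTP_TIMEOUT_SECONDS` come from this diff, while the 30-second value and script name are illustrative):

```typescript
// Assumes HUMANLAYER_API_KEY is already set in the environment.
import { HumanLayer } from "humanlayer";

// Explicit per-client override added in this change:
const hl = new HumanLayer({ verbose: true, httpTimeoutSeconds: 30 });

// Or rely on the new environment-variable default instead:
//   HUMANLAYER_HTTP_TIMEOUT_SECONDS=30 npx tsx some-example.ts
// With neither set, requests fall back to a 10-second timeout.
```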

**Description for the changelog**
Add Vercel AI SDK integration and improve timeout handling across SDKs
This commit is contained in:
dexhorthy
2025-01-09 14:15:32 -08:00
parent c474a86039
commit 3cf8c9cac2
28 changed files with 2344 additions and 194 deletions

View File

@@ -4,31 +4,29 @@
The most basic examples are:
- [openai_client](./openai_client) - basic example of HumanLayer using raw OpenAI client and function calling.
- [langchain](./langchain) - basic langchain examples, includes the most complete set of examples including `human as tool` and `email` channel features
- [controlflow](./controlflow) - basic controlflow example
- [crewai](./crewai) - basic crewai example
- [fastapi](./fastapi) - basic fastapi server showcasing `AsyncHumanLayer` for asyncio apps
- [curl](./curl) - interact with the HumanLayer API using curl
### More advanced examples
These examples include more end-to-end API examples, using webservers like flask and fastapi, and using some more advanced [state management](https://humanlayer.dev/docs/core/state-management) techniques.
- [openai_client/03-imperative_fetch.py](./openai_client/03-imperative_fetch.py) - showing how you can use lower-level SDK methods to interact with the HumanLayer API.
- [fastapi-webhooks](./fastapi-webhooks) - fastapi server that leverages humanlayer webhooks (e.g. with ngrok locally) to fire-and-forget function calls, and handle human approval events as they are received
- [fastapi-email](./fastapi-email) - two end-to-end examples of a workflow designed to be initiated via email, where approvals and requests from the agents are sent as replies on the same email thread. Includes two versions:
- one where the fastapi server manages state
- one where the fastapi server leverages the `HumanLayer` state management to manage state
### TypeScript examples
- [ts_openai_client](./ts_openai_client) - basic example of HumanLayer using raw OpenAI client and function calling
- [ts_vercel_ai_sdk](./ts_vercel_ai_sdk) - example showcasing HumanLayer + Vercel AI SDK
- [ts_langchain](./ts_langchain) - basic example of HumanLayer using LangchainJS
- [ts_email_classifier](./ts_email_classifier) - basic example of various classification/labeling workflows, using an llm to label emails and then providing sync or async mechanisms for human input on classifications
### Other LLMs

View File

@@ -2,6 +2,6 @@ langchain
langchain-openai
python-dotenv
# install humanlayer
humanlayer==0.7.0
# humanlayer==0.7.0
# or if you want an editable version
#-e .
-e .

View File

@@ -1,159 +0,0 @@
# contrived example of using an API token / python lib to approve a function call
import json
import logging
import time
from dotenv import load_dotenv
from openai import OpenAI
from humanlayer import HumanLayer
from humanlayer.core.models import FunctionCallSpec, FunctionCallStatus
load_dotenv()
hl = HumanLayer(
verbose=True,
# run_id is optional -it can be used to identify the agent in approval history
run_id="openai-imperative-fetch-04",
)
PROMPT = "multiply 2 and 5, then add 32 to the result"
def add(x: int, y: int) -> int:
"""Add two numbers together."""
return x + y
def multiply(x: int, y: int) -> int:
"""multiply two numbers"""
return x * y
math_tools_openai = [
{
"type": "function",
"function": {
"name": "add",
"description": "Add two numbers together.",
"parameters": {
"type": "object",
"properties": {
"x": {"type": "number"},
"y": {"type": "number"},
},
"required": ["x", "y"],
},
},
},
{
"type": "function",
"function": {
"name": "multiply",
"description": "multiply two numbers",
"parameters": {
"type": "object",
"properties": {
"x": {"type": "number"},
"y": {"type": "number"},
},
"required": ["x", "y"],
},
},
},
]
logger = logging.getLogger(__name__)
def run_chain(prompt: str, tools_openai: list[dict]) -> str:
client = OpenAI()
messages = [{"role": "user", "content": prompt}]
response = client.chat.completions.create(
model="gpt-4o",
messages=messages,
tools=tools_openai,
tool_choice="auto",
)
while response.choices[0].finish_reason != "stop":
response_message = response.choices[0].message
tool_calls = response_message.tool_calls
if tool_calls:
messages.append(response_message) # extend conversation with assistant's reply
logger.info(
"last message led to %s tool calls: %s",
len(tool_calls),
[(tool_call.function.name, tool_call.function.arguments) for tool_call in tool_calls],
)
for tool_call in tool_calls:
function_name = tool_call.function.name
function_args = json.loads(tool_call.function.arguments)
function_response_json: str
# who needs hash maps? switch statements are the purest form of polymorphism
if function_name == "add":
logger.info("CALL tool %s with %s", function_name, function_args)
function_result = add(**function_args)
function_response_json = json.dumps(function_result)
# you're in charge now. go forth and multiply
elif function_name == "multiply":
logger.info("CALL tool %s with %s", function_name, function_args)
call = hl.create_function_call(
spec=FunctionCallSpec(
fn="add",
kwargs=function_args,
),
# call_id is optional but you can supply it if you want,
# in this case the openai tool_call_id is a natural choice
call_id=tool_call.id,
)
# loop until the call is approved
while (not call.status) or (call.status.approved is None):
time.sleep(5)
call = hl.get_function_call(call_id=tool_call.id)
hl.respond_to_function_call(call_id=tool_call.id, status=FunctionCallStatus(approved=True))
call = hl.get_function_call(call_id=tool_call.id)
if call.status.approved:
function_result = multiply(**function_args)
function_response_json = json.dumps(function_result)
else:
function_response_json = json.dumps(
{"error": f"call {call.spec.fn} not approved, comment was {call.status.comment}"}
)
else:
raise Exception(f"unknown function {function_name}") # noqa: TRY002
logger.info(
"tool %s responded with %s",
function_name,
function_response_json[:200],
)
messages.append(
{
"tool_call_id": tool_call.id,
"role": "tool",
"name": function_name,
"content": function_response_json,
}
) # extend conversation with function response
response = client.chat.completions.create(
model="gpt-4o",
messages=messages,
tools=tools_openai,
)
return response.choices[0].message.content
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
result = run_chain(PROMPT, math_tools_openai)
print("\n\n----------Result----------\n\n")
print(result)

View File

@@ -0,0 +1,43 @@
import { config } from "dotenv";
import { HumanLayer } from "humanlayer";
import {
ClassifiedEmail,
classifyEmail,
logEmails,
twoEmailsShuffled,
} from "./common";
config(); // Load environment variables
const hl = new HumanLayer({
verbose: true,
runId: "email-classifier",
});
async function main() {
try {
console.log("\nClassifying emails...\n");
const results: ClassifiedEmail[] = [];
for (const email of twoEmailsShuffled) {
const classification = await classifyEmail(email);
console.log(
`Classification for "${email.subject}" was ${classification}`,
);
results.push({
...email,
classification,
});
}
logEmails(results);
} catch (error) {
console.error("Error:", error);
}
}
main()
.then(console.log)
.catch((e) => {
console.error(e);
process.exit(1);
});

View File

@@ -0,0 +1,80 @@
import { HumanLayer, ResponseOption } from "humanlayer";
import { config } from "dotenv";
import {
Classification,
classificationValues,
ClassifiedEmail,
classifyEmail,
logEmails,
twoEmailsShuffled,
} from "./common";
config(); // Load environment variables
const hl = new HumanLayer({
verbose: true,
runId: "email-classifier",
contactChannel: {
slack: {
channel_or_user_id: "",
context_about_channel_or_user: "",
experimental_slack_blocks: true,
},
},
});
async function main() {
try {
console.log("\nClassifying emails...\n");
const results: ClassifiedEmail[] = [];
for (const email of twoEmailsShuffled) {
const classification = await classifyEmail(email);
console.log(
`Classification for "${email.subject}" was ${classification}, checking with human`,
);
const { subject, body, to, from } = email;
const remainingOptions = classificationValues.filter(
(c) => c !== classification,
);
const responseOptions: ResponseOption[] = remainingOptions.map((c) => ({
name: c,
title: c,
description: `Classify as ${c}`,
prompt_fill: `manual classify: ${c}`,
interactive: false,
}));
// fetch human review as labels are processing
const humanReview = await hl.fetchHumanApproval({
spec: {
fn: "classifyEmail",
kwargs: { to, from, subject, body, classification },
reject_options: responseOptions,
},
});
const humanClassification = humanReview.approved
? classification
: (humanReview.reject_option_name as Classification | null | undefined);
results.push({
...email,
classification,
hasHumanReview: true,
humanComment: humanReview.comment,
humanClassification,
});
}
logEmails(results);
} catch (error) {
console.error("Error:", error);
}
}
main()
.then(console.log)
.catch((e) => {
console.error(e);
process.exit(1);
});

View File

@@ -0,0 +1,202 @@
import OpenAI from "openai";
import { ChatCompletionTool } from "openai/resources";
export const prompt = (
email: Email,
) => `Classify this email into one of these categories:
- read: not directly actionable, but should be read soon
- action: emails requiring specific tasks or responses
- archive: legitimate emails that don't need action or reading
- spam: unsolicited commercial emails, scams, or suspicious messages
Here is the email to classify:
${JSON.stringify(email)}
`; // json is not token-efficient but it will do for now
export type Email = {
id: string;
subject: string;
from: string;
to: string;
body: string;
};
export const logEmails = (emails: ClassifiedEmail[]) => {
console.log("\nResults:\n");
const tableData = emails.map((email) => ({
ID: email.id,
Subject: email.subject,
Classification: email.classification,
"Human Review": email.hasHumanReview ? "✓" : "✗",
"Human Classification": email.humanClassification || "-",
"Human Comment": email.humanComment || "-",
}));
console.table(tableData);
};
export type Classification = "read" | "action" | "archive" | "spam";
export const classificationValues: string[] = [
"read",
"action",
"archive",
"spam",
];
export type ClassifiedEmail = Email & {
classification: Classification;
hasHumanReview?: boolean;
humanComment?: string | null;
humanClassification?: Classification | null;
};
export const emails: Email[] = [
{
id: "email_25b7f8a9d3e4c2_1704836718",
subject: "Exclusive Partnership Opportunity",
from: "marketing@techvendor.com",
to: "me@company.com",
body: `Hi there,
I hope this email finds you well. I wanted to reach out about an exciting opportunity to partner with TechVendor. We're offering exclusive deals on our enterprise software solutions.
Would love to schedule a quick 15-minute call to discuss how we can help optimize your operations.
Best regards,
Marketing Team`,
},
{
id: "email_7c4f9b2e5d8a1_1704836718",
subject: "Team Sync Notes - Product Launch",
from: "sarah@company.com",
to: "team@company.com",
body: `Hey team,
Here are the key points from today's sync:
- Launch date set for March 15th
- Marketing materials due by March 1st
- Beta testing starts next week
- Need volunteers for user interviews
Please review and let me know if I missed anything.
Best,
Sarah`,
},
{
id: "email_3a6d9c4b8e2f5_1704836718",
subject: "Your Account Security",
from: "security@legitbank.com",
to: "me@company.com",
body: `URGENT: Your account requires immediate verification. Click here to confirm your details within 24 hours to avoid service interruption.
If you did not request this verification, please disregard this message.`,
},
{
id: "email_9f2e5d8a1b7c4_1704836718",
subject: "Quick question about API docs",
from: "dev@customer.com",
to: "support@company.com",
body: `Hi there,
I'm trying to implement the authentication flow described in your docs, but I'm getting a 401 error when using the refresh token. Am I missing something?
Here's what I've tried so far:
1. Generated new access token
2. Added Bearer prefix
3. Checked token expiration
Any help would be appreciated!
Thanks`,
},
{
id: "email_4b8e2f5a7d9c3_1704836718",
subject: "Weekly Newsletter - Tech Industry Updates",
from: "newsletter@techdigest.com",
to: "subscribers@techdigest.com",
body: `This Week in Tech:
- AI breakthroughs in medical imaging
- New programming language trends
- Top 10 startup funding rounds
- Industry job openings
Click to read more...`,
},
{
id: "email_1704836718_9f2e5d8a1b7c4",
subject: "Happy New Year!",
from: "newsletter@devpost.com",
to: "subscribers@devpost.com",
body: `Make 2025 your year to shine. Happy New Year! It's time to turn those resolutions into reality. We're here to help you make this year your best hackathon year yet! Maybe you're aiming to master a new programming language, network with amazing developers from around the world, or finally snag that grand prize. Ambitious! We love it. No matter your goals, these hackathons are your ticket to achieving them. We can't wait to see all the incredible software you build this year. We'll be bringing you exciting hackathons all year long. Cheers to a great 2025 together! -Devpost Team `,
},
];
export const classifierToolsOpenai: ChatCompletionTool[] = [
{
type: "function",
function: {
name: "action",
description: "emails requiring specific tasks or responses",
},
},
{
type: "function",
function: {
name: "read",
description: "not directly actionable, but should be read soon",
},
},
{
type: "function",
function: {
name: "archive",
description:
"a task that is not worth reading and should be sent straight to the archive",
},
},
{
type: "function",
function: {
name: "spam",
description:
"unsolicited commercial emails, scams, or suspicious messages",
},
},
];
export const twoEmailsShuffled = emails
.sort(() => Math.random() - 0.5)
.slice(0, 2);
// this should really be written in baml
export async function classifyEmail(email: Email): Promise<Classification> {
const client = new OpenAI();
const messages: Array<OpenAI.Chat.ChatCompletionMessageParam> = [
{ role: "user", content: prompt(email) },
];
const response = await client.chat.completions.create({
model: "gpt-4o-mini",
messages,
tools: classifierToolsOpenai,
tool_choice: "required",
});
const toolCalls = response.choices[0].message.tool_calls;
if (!toolCalls || toolCalls.length === 0) {
console.log(
`No classification received for email ${email}, response: ${JSON.stringify(
response,
)}`,
);
throw new Error("No classification received");
}
if (!classificationValues.includes(toolCalls[0].function.name)) {
console.log("unknown classification received", toolCalls[0].function.name);
throw new Error("Unknown classification received");
}
return toolCalls[0].function.name as Classification;
}

View File

@@ -0,0 +1,4 @@
# copy to .env
OPENAI_API_KEY=
HUMANLAYER_API_KEY=

Binary file not shown.

After

Width:  |  Height:  |  Size: 280 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 141 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 72 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 224 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 480 KiB

File diff suppressed because it is too large

View File

@@ -0,0 +1,37 @@
{
"name": "humanlayer-ts-email-classifier",
"version": "0.1.0",
"description": "example of using humanlayer with openai in typescript",
"main": "index.ts",
"type": "module",
"scripts": {
"build": "tsc",
"no-humans": "tsx --env-file=.env 01-no-humans.ts",
"human-review-sync": "tsx --env-file=.env 02-human-review-sync.ts"
},
"repository": {
"type": "git",
"url": "git+https://github.com/humanlayer/humanlayer.git"
},
"keywords": [
"human-in-the-loop",
"ai-agents",
"tool-use",
"function-calling"
],
"author": "HumanLayer Authors",
"license": "Apache-2.0",
"dependencies": {
"@types/express": "^5.0.0",
"dotenv": "^16.4.7",
"express": "^4.21.2",
"humanlayer": "file:../../humanlayer-ts",
"openai": "^4.0.0"
},
"devDependencies": {
"@types/node": "^20.14.9",
"ts-node": "^10.9.2",
"tsx": "^4.19.0",
"typescript": "^5.5.2"
}
}

View File

@@ -0,0 +1,47 @@
# Task Classifier Example
### Three versions
- [01-no-humans.ts](./01-no-humans.ts) - classify the emails with no human intervention
- [02-human-review-sync.ts](./02-human-review-sync.ts) - classify the emails, checking with a human before saving classifications, then print results
- [03-humans-async.ts](./03-humans-async.ts) - classify the emails, print them out, then start a webserver to listen for human overrides. When human feedback is received, print the updated list.
### Running the examples
```
npm install
npm run no-humans
npm run human-review-sync
npm run human-review-async
```
For all three examples, you'll need to set `OPENAI_API_KEY` in your environment.
For the human-review examples, you'll need to set `HUMANLAYER_API_KEY` in your environment. You can get one at [app.humanlayer.dev](https://app.humanlayer.dev/).
For the `human-review-async` example, you will need to configure HumanLayer to send a [Response Webhook](https://humanlayer.dev/docs/core/response-webhooks) to your local server using ngrok or similar.
### 01-no-humans.ts
In this example, we just classify the emails and print the results of the LLM classification.
![no-humans](./img/no-humans.png)
### 02-human-review-sync.ts
In this example, each email is sent synchronously to a human for review, and then all results are printed at the end.
![human-review-sync](./img/human-review-sync.png)
### 03-human-review-async.ts
In this example, all LLM classifications are computed, and then they are all sent to a human for review. When a human review is completed, it will
be received by a webhook, and the results will be printed out.
![human-review-async](./img/human-review-async.png)
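
Below is a rough sketch of the webhook side of that flow (illustrative only — the route, port, and payload handling here are assumptions, not the actual `03-humans-async.ts` code; it assumes HumanLayer posts the completed function call as JSON):

```typescript
import express from "express";

const app = express();
app.use(express.json());

// Receive the completed review and print the human's decision.
app.post("/webhook", (req, res) => {
  const call = req.body; // assumed shape: { call_id, spec, status }
  const { approved, comment, reject_option_name } = call.status ?? {};
  console.log(
    `human review for ${call.call_id}: approved=${approved}, ` +
      `override=${reject_option_name ?? "-"}, comment=${comment ?? "-"}`,
  );
  res.status(200).send("ok");
});

app.listen(8000, () => console.log("listening on :8000 for HumanLayer webhooks"));
```

Pair this with ngrok (or similar) so HumanLayer's Response Webhook can reach the local server, as noted above.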
### Next Steps
If you're familiar with this example and want to take it further, here are some ideas:
- use the beta "fine tuning" feature to export your human responses for usage in fine-tuning

View File

@@ -0,0 +1,13 @@
{
"include": ["**/*.ts"],
"exclude": ["node_modules"],
"compilerOptions": {
"target": "es2016" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
"module": "commonjs" /* Specify what module code is generated. */,
"esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */,
"forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
"strict": true /* Enable all strict type-checking options. */,
"skipLibCheck": true /* Skip type checking all .d.ts files. */,
"outDir": "./dist"
}
}

examples/ts_openai_client/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
dist/

View File

@@ -9,6 +9,7 @@
"version": "0.1.0",
"license": "Apache-2.0",
"dependencies": {
"dotenv": "^16.4.7",
"humanlayer": "0.7.0",
"openai": "^4.0.0"
},
@@ -19,21 +20,6 @@
"typescript": "^5.5.2"
}
},
"../../humanlayer-ts": {
"name": "humanlayer",
"version": "0.7.0",
"license": "Apache-2.0",
"devDependencies": {
"@types/jest": "^29.5.12",
"@types/node": "^20.14.9",
"eslint": "^8.57.0",
"jest": "^29.7.0",
"prettier": "^3.3.2",
"ts-jest": "^29.1.5",
"ts-node": "^10.9.2",
"typescript": "^5.5.2"
}
},
"node_modules/@cspotcode/source-map-support": {
"version": "0.8.1",
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
@@ -673,6 +659,18 @@
"node": ">=0.3.1"
}
},
"node_modules/dotenv": {
"version": "16.4.7",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz",
"integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==",
"license": "BSD-2-Clause",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://dotenvx.com"
}
},
"node_modules/es-define-property": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz",
@@ -902,8 +900,10 @@
}
},
"node_modules/humanlayer": {
"resolved": "../../humanlayer-ts",
"link": true
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/humanlayer/-/humanlayer-0.7.0.tgz",
"integrity": "sha512-FTdSD48SzgNUobHgQ5jjaNStl7Yt2V0qx9Bh7MGLLr/WU5l4+LwL3q7ilCQcweftNWSJzzHwggW/vhik9jcvpw==",
"license": "Apache-2.0"
},
"node_modules/make-error": {
"version": "1.3.6",

View File

@@ -9,7 +9,8 @@
"example": "tsx --env-file=.env 01-index.ts",
"human-as-tool": "tsx --env-file=.env 02-human-as-tool.ts",
"human-email": "tsx --env-file=.env 03-human-email.ts",
"agent-side": "tsx --env-file=.env 04-agent-side-approvals.ts"
"agent-side": "tsx --env-file=.env 04-agent-side-approvals.ts",
"task-classifier": "tsx --env-file=.env 05-task-classifier.ts"
},
"repository": {
"type": "git",
@@ -24,6 +25,7 @@
"author": "HumanLayer Authors",
"license": "Apache-2.0",
"dependencies": {
"dotenv": "^16.4.7",
"humanlayer": "0.7.0",
"openai": "^4.0.0"
},

View File

@@ -7,6 +7,7 @@
"esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */,
"forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
"strict": true /* Enable all strict type-checking options. */,
"skipLibCheck": true /* Skip type checking all .d.ts files. */
"skipLibCheck": true /* Skip type checking all .d.ts files. */,
"outDir": "./dist"
}
}

View File

@@ -41,6 +41,7 @@ export interface HumanLayerParams {
apiKey?: string
apiBaseUrl?: string
verbose?: boolean
httpTimeoutSeconds?: number
}
export class HumanLayer {
@@ -65,6 +66,7 @@ export class HumanLayer {
apiKey,
apiBaseUrl,
verbose = false,
httpTimeoutSeconds = parseInt(process.env.HUMANLAYER_HTTP_TIMEOUT_SECONDS || '10'),
} = params || {}
this.genid = genid
this.sleep = sleep

View File

@@ -4,10 +4,12 @@ import { FunctionCall, FunctionCallStatus, HumanContact, HumanContactStatus } fr
class HumanLayerCloudConnection {
apiKey?: string
apiBaseURL?: string
httpTimeoutSeconds: number
constructor(api_key?: string, api_base_url?: string) {
constructor(api_key?: string, api_base_url?: string, http_timeout_seconds: number = 10) {
this.apiKey = api_key
this.apiBaseURL = api_base_url
this.httpTimeoutSeconds = http_timeout_seconds
if (!this.apiKey) {
throw new Error('HUMANLAYER_API_KEY is required for cloud approvals')

View File

@@ -3,6 +3,7 @@ type FunctionCallStatus = {
responded_at?: Date
approved?: boolean
comment?: string
reject_option_name?: string
}
type SlackContactChannel = {
@@ -46,6 +47,7 @@ type ResponseOption = {
title?: string
description?: string
prompt_fill?: string
interactive?: boolean
}
type FunctionCallSpec = {
@@ -83,6 +85,8 @@ type HumanContactStatus = {
responded_at?: Date
// the response from the human
response?: string
// the name of the selected response option
response_option_name?: string
}
type HumanContact = {
@@ -96,7 +100,6 @@ type HumanContact = {
}
export {
FunctionCallStatus,
SlackContactChannel,
SMSContactChannel,
WhatsAppContactChannel,
@@ -104,6 +107,7 @@ export {
ContactChannel,
ResponseOption,
FunctionCallSpec,
FunctionCallStatus,
FunctionCall,
HumanContactSpec,
HumanContactStatus,

View File

@@ -355,7 +355,11 @@ class HumanLayer(BaseModel):
if contact_channel.email:
contact_human.__doc__ = "Contact a human via email and wait for a response"
contact_human.__name__ = "contact_human_via_email"
contact_human.__annotations__ = {"subject": str, "message": str, "return": str}
contact_human.__annotations__ = {
"subject": str,
"message": str,
"return": str,
}
if contact_channel.email.address:
fn_ctx = re.sub(r"[^a-zA-Z0-9]+", "_", contact_channel.email.address)
fn_ctx = re.sub(r"_+", "_", fn_ctx).strip("_")

View File

@@ -47,3 +47,13 @@ def test_env_var_cloud() -> None:
assert isinstance(hl.backend, CloudHumanLayerBackend)
assert hl.backend.connection.api_key == "foo"
assert hl.backend.connection.api_base_url == "https://api.humanlayer.dev/humanlayer/v1"
def test_timeout() -> None:
with (
env_var("HUMANLAYER_HTTP_TIMEOUT_SECONDS", "30"),
env_var("HUMANLAYER_API_KEY", "foo"),
):
hl = HumanLayer()
assert hl.backend is not None
assert hl.backend.connection.http_timeout == 30

View File

@@ -46,7 +46,7 @@ class AsyncHumanLayerCloudConnection(BaseModel):
session.request(
method,
f"{self.api_base_url}{path}",
timeout=aiohttp.ClientTimeout(total=10),
timeout=aiohttp.ClientTimeout(total=self.http_timeout_seconds),
**kwargs,
) as response,
):

View File

@@ -3,7 +3,7 @@ import logging
import os
import requests
from pydantic import BaseModel, model_validator
from pydantic import BaseModel, Field, model_validator
from humanlayer.core.models import (
FunctionCall,
@@ -23,6 +23,7 @@ logger = logging.getLogger(__name__)
class HumanLayerCloudConnection(BaseModel):
api_key: str | None = None
api_base_url: str | None = None
http_timeout_seconds: int = Field(default_factory=lambda: int(os.getenv("HUMANLAYER_HTTP_TIMEOUT_SECONDS", "10")))
@model_validator(mode="after") # type: ignore
def post_validate(self) -> None:
@@ -43,7 +44,7 @@ class HumanLayerCloudConnection(BaseModel):
method,
f"{self.api_base_url}{path}",
headers={"Authorization": f"Bearer {self.api_key}"},
timeout=10,
timeout=self.http_timeout_seconds,
**kwargs,
)

View File

@@ -152,6 +152,7 @@ class ResponseOption(BaseModel):
title: str | None = None
description: str | None = None
prompt_fill: str | None = None
interactive: bool = False
class FunctionCallSpec(BaseModel):
@@ -167,6 +168,7 @@ class FunctionCallStatus(BaseModel):
responded_at: datetime | None = None
approved: bool | None = None
comment: str | None = None
reject_option_name: str | None = None
class Approved(BaseModel):
approved: Literal[True]
@@ -204,7 +206,9 @@ class FunctionCall(BaseModel):
class Completed(BaseModel):
call: "FunctionCall"
def as_completed(self) -> FunctionCallStatus.Approved | FunctionCallStatus.Rejected:
def as_completed(
self,
) -> FunctionCallStatus.Approved | FunctionCallStatus.Rejected:
if self.call.status is None:
raise ValueError("FunctionCall.Completed.as_completed() called before approval")
return self.call.status.as_completed()
@@ -222,6 +226,7 @@ class HumanContactStatus(BaseModel):
requested_at: datetime | None = None
responded_at: datetime | None = None
response: str | None = None
response_option_name: str | None = None
class HumanContact(BaseModel):