mirror of https://github.com/charmbracelet/crush.git (synced 2025-08-02 05:20:46 +03:00)

ci: auto generate on changes

.gitattributes (vendored): 1 addition

```diff
@@ -1 +1,2 @@
 *.golden linguist-generated=true -text
+.github/crush-schema.json linguist-generated=true
```

.github/crush-schema.json (generated, vendored): new file, 397 lines

@@ -0,0 +1,397 @@
```json
{
  "$schema": "https://json-schema.org/draft/2020-12/schema",
  "$id": "https://github.com/charmbracelet/crush/internal/config/config",
  "$ref": "#/$defs/Config",
  "$defs": {
    "Config": {
      "properties": {
        "models": {
          "additionalProperties": {
            "$ref": "#/$defs/SelectedModel"
          },
          "type": "object",
          "description": "Model configurations for different model types"
        },
        "providers": {
          "additionalProperties": {
            "$ref": "#/$defs/ProviderConfig"
          },
          "type": "object",
          "description": "AI provider configurations"
        },
        "mcp": {
          "$ref": "#/$defs/MCPs",
          "description": "Model Context Protocol server configurations"
        },
        "lsp": {
          "$ref": "#/$defs/LSPs",
          "description": "Language Server Protocol configurations"
        },
        "options": {
          "$ref": "#/$defs/Options",
          "description": "General application options"
        },
        "permissions": {
          "$ref": "#/$defs/Permissions",
          "description": "Permission settings for tool usage"
        }
      },
      "additionalProperties": false,
      "type": "object"
    },
    "LSPConfig": {
      "properties": {
        "enabled": {
          "type": "boolean",
          "description": "Whether this LSP server is disabled",
          "default": false
        },
        "command": {
          "type": "string",
          "description": "Command to execute for the LSP server",
          "examples": [
            "gopls"
          ]
        },
        "args": {
          "items": {
            "type": "string"
          },
          "type": "array",
          "description": "Arguments to pass to the LSP server command"
        },
        "options": {
          "description": "LSP server-specific configuration options"
        }
      },
      "additionalProperties": false,
      "type": "object",
      "required": [
        "command"
      ]
    },
    "LSPs": {
      "additionalProperties": {
        "$ref": "#/$defs/LSPConfig"
      },
      "type": "object"
    },
    "MCPConfig": {
      "properties": {
        "command": {
          "type": "string",
          "description": "Command to execute for stdio MCP servers",
          "examples": [
            "npx"
          ]
        },
        "env": {
          "additionalProperties": {
            "type": "string"
          },
          "type": "object",
          "description": "Environment variables to set for the MCP server"
        },
        "args": {
          "items": {
            "type": "string"
          },
          "type": "array",
          "description": "Arguments to pass to the MCP server command"
        },
        "type": {
          "$ref": "#/$defs/MCPType",
          "description": "Type of MCP connection"
        },
        "url": {
          "type": "string",
          "format": "uri",
          "description": "URL for HTTP or SSE MCP servers",
          "examples": [
            "http://localhost:3000/mcp"
          ]
        },
        "disabled": {
          "type": "boolean",
          "description": "Whether this MCP server is disabled",
          "default": false
        },
        "headers": {
          "additionalProperties": {
            "type": "string"
          },
          "type": "object",
          "description": "HTTP headers for HTTP/SSE MCP servers"
        }
      },
      "additionalProperties": false,
      "type": "object",
      "required": [
        "type"
      ]
    },
    "MCPType": {
      "type": "string",
      "enum": [
        "stdio",
        "sse",
        "http"
      ],
      "description": "Type of MCP connection protocol",
      "default": "stdio"
    },
    "MCPs": {
      "additionalProperties": {
        "$ref": "#/$defs/MCPConfig"
      },
      "type": "object"
    },
    "Model": {
      "properties": {
        "id": {
          "type": "string"
        },
        "name": {
          "type": "string"
        },
        "cost_per_1m_in": {
          "type": "number"
        },
        "cost_per_1m_out": {
          "type": "number"
        },
        "cost_per_1m_in_cached": {
          "type": "number"
        },
        "cost_per_1m_out_cached": {
          "type": "number"
        },
        "context_window": {
          "type": "integer"
        },
        "default_max_tokens": {
          "type": "integer"
        },
        "can_reason": {
          "type": "boolean"
        },
        "has_reasoning_efforts": {
          "type": "boolean"
        },
        "default_reasoning_effort": {
          "type": "string"
        },
        "supports_attachments": {
          "type": "boolean"
        }
      },
      "additionalProperties": false,
      "type": "object",
      "required": [
        "id",
        "name",
        "cost_per_1m_in",
        "cost_per_1m_out",
        "cost_per_1m_in_cached",
        "cost_per_1m_out_cached",
        "context_window",
        "default_max_tokens",
        "can_reason",
        "has_reasoning_efforts",
        "supports_attachments"
      ]
    },
    "Options": {
      "properties": {
        "context_paths": {
          "items": {
            "type": "string",
            "examples": [
              ".cursorrules",
              "CRUSH.md"
            ]
          },
          "type": "array",
          "description": "Paths to files containing context information for the AI"
        },
        "tui": {
          "$ref": "#/$defs/TUIOptions",
          "description": "Terminal user interface options"
        },
        "debug": {
          "type": "boolean",
          "description": "Enable debug logging",
          "default": false
        },
        "debug_lsp": {
          "type": "boolean",
          "description": "Enable debug logging for LSP servers",
          "default": false
        },
        "disable_auto_summarize": {
          "type": "boolean",
          "description": "Disable automatic conversation summarization",
          "default": false
        },
        "data_directory": {
          "type": "string",
          "description": "Directory for storing application data (relative to working directory)",
          "default": ".crush",
          "examples": [
            ".crush"
          ]
        }
      },
      "additionalProperties": false,
      "type": "object"
    },
    "Permissions": {
      "properties": {
        "allowed_tools": {
          "items": {
            "type": "string",
            "examples": [
              "bash",
              "view"
            ]
          },
          "type": "array",
          "description": "List of tools that don't require permission prompts"
        }
      },
      "additionalProperties": false,
      "type": "object"
    },
    "ProviderConfig": {
      "properties": {
        "id": {
          "type": "string",
          "description": "Unique identifier for the provider",
          "examples": [
            "openai"
          ]
        },
        "name": {
          "type": "string",
          "description": "Human-readable name for the provider",
          "examples": [
            "OpenAI"
          ]
        },
        "base_url": {
          "type": "string",
          "format": "uri",
          "description": "Base URL for the provider's API",
          "examples": [
            "https://api.openai.com/v1"
          ]
        },
        "type": {
          "type": "string",
          "enum": [
            "openai",
            "anthropic",
            "gemini",
            "azure",
            "vertexai"
          ],
          "description": "Provider type that determines the API format",
          "default": "openai"
        },
        "api_key": {
          "type": "string",
          "description": "API key for authentication with the provider",
          "examples": [
            "$OPENAI_API_KEY"
          ]
        },
        "disable": {
          "type": "boolean",
          "description": "Whether this provider is disabled",
          "default": false
        },
        "system_prompt_prefix": {
          "type": "string",
          "description": "Custom prefix to add to system prompts for this provider"
        },
        "extra_headers": {
          "additionalProperties": {
            "type": "string"
          },
          "type": "object",
          "description": "Additional HTTP headers to send with requests"
        },
        "extra_body": {
          "type": "object",
          "description": "Additional fields to include in request bodies"
        },
        "models": {
          "items": {
            "$ref": "#/$defs/Model"
          },
          "type": "array",
          "description": "List of models available from this provider"
        }
      },
      "additionalProperties": false,
      "type": "object"
    },
    "SelectedModel": {
      "properties": {
        "model": {
          "type": "string",
          "description": "The model ID as used by the provider API",
          "examples": [
            "gpt-4o"
          ]
        },
        "provider": {
          "type": "string",
          "description": "The model provider ID that matches a key in the providers config",
          "examples": [
            "openai"
          ]
        },
        "reasoning_effort": {
          "type": "string",
          "enum": [
            "low",
            "medium",
            "high"
          ],
          "description": "Reasoning effort level for OpenAI models that support it"
        },
        "max_tokens": {
          "type": "integer",
          "maximum": 200000,
          "minimum": 1,
          "description": "Maximum number of tokens for model responses",
          "examples": [
            4096
          ]
        },
        "think": {
          "type": "boolean",
          "description": "Enable thinking mode for Anthropic models that support reasoning"
        }
      },
      "additionalProperties": false,
      "type": "object",
      "required": [
        "model",
        "provider"
      ]
    },
    "TUIOptions": {
      "properties": {
        "compact_mode": {
          "type": "boolean",
          "description": "Enable compact mode for the TUI interface",
          "default": false
        }
      },
      "additionalProperties": false,
      "type": "object"
    }
  }
}
```
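
For reference, a minimal `crush.json` in the shape this schema describes might look like the sketch below. The model, provider, and key values are simply the examples the schema itself lists, and the `$schema` URL is the one the README examples in this commit use; this is illustrative, not a recommended configuration.

```json
{
  "$schema": "https://charm.land/crush.json",
  "models": {
    "large": {
      "model": "gpt-4o",
      "provider": "openai"
    }
  },
  "providers": {
    "openai": {
      "api_key": "$OPENAI_API_KEY"
    }
  }
}
```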

.github/workflows/schema-update.yml (vendored): new file, 26 lines

@@ -0,0 +1,26 @@

```yaml
name: Update Schema

on:
  push:
    branches: [main]
    paths:
      - "internal/config/**"

jobs:
  update-schema:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
      - uses: actions/setup-go@v5
        with:
          go-version-file: go.mod
      - run: go run . schema > .github/crush-schema.json
      - uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v5
        with:
          commit_message: "chore: auto-update generated files"
          branch: main
          commit_user_name: actions-user
          commit_user_email: actions@github.com
          commit_author: actions-user <actions@github.com>
```
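
Whenever files under `internal/config/` change on `main`, this workflow regenerates the schema with `go run . schema > .github/crush-schema.json` and commits the result back; running the same command locally produces the same file.
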
````diff
@@ -85,6 +85,7 @@ Crush can use LSPs for additional context to help inform its decisions, just like

```json
{
  "$schema": "https://charm.land/crush.json",
  "lsp": {
    "go": {
      "command": "gopls"
@@ -106,6 +107,7 @@ Crush supports Model Context Protocol (MCP) servers through three transport types

```json
{
  "$schema": "https://charm.land/crush.json",
  "mcp": {
    "filesystem": {
      "type": "stdio",
@@ -136,6 +138,7 @@ Crush supports Model Context Protocol (MCP) servers through three transport types
### Logging

Enable debug logging with the `-d` flag or in config. View logs with `crush logs`. Logs are stored in `.crush/logs/crush.log`.

```bash
# Run with debug logging
crush -d
@@ -154,6 +157,7 @@ Add to your `crush.json` config file:

```json
{
  "$schema": "https://charm.land/crush.json",
  "options": {
    "debug": true,
    "debug_lsp": true
@@ -167,6 +171,7 @@ Crush includes a permission system to control which tools can be executed without

```json
{
  "$schema": "https://charm.land/crush.json",
  "permissions": {
    "allowed_tools": [
      "view",
@@ -196,6 +201,7 @@ Here's an example configuration for Deepseek, which uses an OpenAI-compatible API

```json
{
  "$schema": "https://charm.land/crush.json",
  "providers": {
    "deepseek": {
      "type": "openai",
@@ -224,6 +230,7 @@ You can also configure custom Anthropic-compatible providers:

```json
{
  "$schema": "https://charm.land/crush.json",
  "providers": {
    "custom-anthropic": {
      "type": "anthropic",
````

```diff
@@ -3,6 +3,7 @@ package cmd
 import (
 	"encoding/json"
 	"fmt"
+	"reflect"

 	"github.com/charmbracelet/crush/internal/config"
 	"github.com/invopop/jsonschema"
@@ -15,9 +16,46 @@ var schemaCmd = &cobra.Command{
 	Long:   "Generate JSON schema for the crush configuration file",
 	Hidden: true,
 	RunE: func(cmd *cobra.Command, args []string) error {
-		reflector := jsonschema.Reflector{}
+		reflector := jsonschema.Reflector{
+			// Custom type mapper to handle csync.Map
+			Mapper: func(t reflect.Type) *jsonschema.Schema {
+				// Handle csync.Map[string, ProviderConfig] specifically
+				if t.String() == "csync.Map[string,github.com/charmbracelet/crush/internal/config.ProviderConfig]" {
+					return &jsonschema.Schema{
+						Type:        "object",
+						Description: "AI provider configurations",
+						AdditionalProperties: &jsonschema.Schema{
+							Ref: "#/$defs/ProviderConfig",
+						},
+					}
+				}
+				return nil
+			},
+		}
+
+		// First reflect the config to get the main schema
 		schema := reflector.Reflect(&config.Config{})
+
+		// Now manually add the ProviderConfig definition that might be missing
+		providerConfigSchema := reflector.ReflectFromType(reflect.TypeOf(config.ProviderConfig{}))
+		if schema.Definitions == nil {
+			schema.Definitions = make(map[string]*jsonschema.Schema)
+		}
+
+		// Extract the actual definition from the nested schema
+		if providerConfigSchema.Definitions != nil && providerConfigSchema.Definitions["ProviderConfig"] != nil {
+			schema.Definitions["ProviderConfig"] = providerConfigSchema.Definitions["ProviderConfig"]
+			// Also add any other definitions from the provider config schema
+			for k, v := range providerConfigSchema.Definitions {
+				if k != "ProviderConfig" {
+					schema.Definitions[k] = v
+				}
+			}
+		} else {
+			// Fallback: use the schema itself if it's not nested
+			schema.Definitions["ProviderConfig"] = providerConfigSchema
+		}

 		schemaJSON, err := json.MarshalIndent(schema, "", " ")
 		if err != nil {
 			return fmt.Errorf("failed to marshal schema: %w", err)
```
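
The manual merge above exists because the reflector, left alone, can emit the `csync.Map` providers field without a `ProviderConfig` definition. A quick way to confirm the generated file ends up with that definition is a small standalone check like the following; this is a sketch, not part of the commit, and the anonymous struct fields are my own names (the file path matches the workflow's output).

```go
// Sanity-check the generated schema: it must parse as JSON and its $defs
// must contain the ProviderConfig definition that the schema command merges in.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os"
)

func main() {
	raw, err := os.ReadFile(".github/crush-schema.json")
	if err != nil {
		log.Fatalf("read schema: %v", err)
	}

	var schema struct {
		Schema string                     `json:"$schema"`
		Defs   map[string]json.RawMessage `json:"$defs"`
	}
	if err := json.Unmarshal(raw, &schema); err != nil {
		log.Fatalf("schema is not valid JSON: %v", err)
	}

	if _, ok := schema.Defs["ProviderConfig"]; !ok {
		log.Fatal("ProviderConfig definition is missing from $defs")
	}
	fmt.Println("schema declares", schema.Schema, "and defines ProviderConfig")
}
```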
```diff
@@ -13,6 +13,7 @@ import (
 	"github.com/charmbracelet/catwalk/pkg/catwalk"
 	"github.com/charmbracelet/crush/internal/csync"
 	"github.com/charmbracelet/crush/internal/env"
+	"github.com/invopop/jsonschema"
 	"github.com/tidwall/sjson"
 )

@@ -45,51 +46,61 @@ const (
 	SelectedModelTypeSmall SelectedModelType = "small"
 )

+// JSONSchema returns the JSON schema for SelectedModelType
+func (SelectedModelType) JSONSchema() *jsonschema.Schema {
+	return &jsonschema.Schema{
+		Type:        "string",
+		Description: "Model type selection for different use cases",
+		Enum:        []any{"large", "small"},
+		Default:     "large",
+	}
+}
+
 type SelectedModel struct {
 	// The model id as used by the provider API.
 	// Required.
-	Model string `json:"model"`
+	Model string `json:"model" jsonschema:"required,description=The model ID as used by the provider API,example=gpt-4o"`
 	// The model provider, same as the key/id used in the providers config.
 	// Required.
-	Provider string `json:"provider"`
+	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`

 	// Only used by models that use the openai provider and need this set.
-	ReasoningEffort string `json:"reasoning_effort,omitempty"`
+	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`

 	// Overrides the default model configuration.
-	MaxTokens int64 `json:"max_tokens,omitempty"`
+	MaxTokens int64 `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,minimum=1,maximum=200000,example=4096"`

 	// Used by anthropic models that can reason to indicate if the model should think.
-	Think bool `json:"think,omitempty"`
+	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`
 }

 type ProviderConfig struct {
 	// The provider's id.
-	ID string `json:"id,omitempty"`
+	ID string `json:"id,omitempty" jsonschema:"description=Unique identifier for the provider,example=openai"`
 	// The provider's name, used for display purposes.
-	Name string `json:"name,omitempty"`
+	Name string `json:"name,omitempty" jsonschema:"description=Human-readable name for the provider,example=OpenAI"`
 	// The provider's API endpoint.
-	BaseURL string `json:"base_url,omitempty"`
+	BaseURL string `json:"base_url,omitempty" jsonschema:"description=Base URL for the provider's API,format=uri,example=https://api.openai.com/v1"`
 	// The provider type, e.g. "openai", "anthropic", etc. if empty it defaults to openai.
-	Type catwalk.Type `json:"type,omitempty"`
+	Type catwalk.Type `json:"type,omitempty" jsonschema:"description=Provider type that determines the API format,enum=openai,enum=anthropic,enum=gemini,enum=azure,enum=vertexai,default=openai"`
 	// The provider's API key.
-	APIKey string `json:"api_key,omitempty"`
+	APIKey string `json:"api_key,omitempty" jsonschema:"description=API key for authentication with the provider,example=$OPENAI_API_KEY"`
 	// Marks the provider as disabled.
-	Disable bool `json:"disable,omitempty"`
+	Disable bool `json:"disable,omitempty" jsonschema:"description=Whether this provider is disabled,default=false"`

 	// Custom system prompt prefix.
-	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty"`
+	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty" jsonschema:"description=Custom prefix to add to system prompts for this provider"`

 	// Extra headers to send with each request to the provider.
-	ExtraHeaders map[string]string `json:"extra_headers,omitempty"`
+	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"description=Additional HTTP headers to send with requests"`
 	// Extra body
-	ExtraBody map[string]any `json:"extra_body,omitempty"`
+	ExtraBody map[string]any `json:"extra_body,omitempty" jsonschema:"description=Additional fields to include in request bodies"`

 	// Used to pass extra parameters to the provider.
 	ExtraParams map[string]string `json:"-"`

 	// The provider models
-	Models []catwalk.Model `json:"models,omitempty"`
+	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
 }

 type MCPType string
@@ -100,42 +111,52 @@ const (
 	MCPHttp MCPType = "http"
 )

+// JSONSchema returns the JSON schema for MCPType
+func (MCPType) JSONSchema() *jsonschema.Schema {
+	return &jsonschema.Schema{
+		Type:        "string",
+		Description: "Type of MCP connection protocol",
+		Enum:        []any{"stdio", "sse", "http"},
+		Default:     "stdio",
+	}
+}
+
 type MCPConfig struct {
-	Command string `json:"command,omitempty" `
-	Env map[string]string `json:"env,omitempty"`
-	Args []string `json:"args,omitempty"`
-	Type MCPType `json:"type"`
-	URL string `json:"url,omitempty"`
-	Disabled bool `json:"disabled,omitempty"`
+	Command string `json:"command,omitempty" jsonschema:"description=Command to execute for stdio MCP servers,example=npx"`
+	Env map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the MCP server"`
+	Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the MCP server command"`
+	Type MCPType `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
+	URL string `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
+	Disabled bool `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`

 	// TODO: maybe make it possible to get the value from the env
-	Headers map[string]string `json:"headers,omitempty"`
+	Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
 }

 type LSPConfig struct {
-	Disabled bool `json:"enabled,omitempty"`
-	Command string `json:"command"`
-	Args []string `json:"args,omitempty"`
-	Options any `json:"options,omitempty"`
+	Disabled bool `json:"enabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
+	Command string `json:"command" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
+	Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
+	Options any `json:"options,omitempty" jsonschema:"description=LSP server-specific configuration options"`
 }

 type TUIOptions struct {
-	CompactMode bool `json:"compact_mode,omitempty"`
+	CompactMode bool `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
 	// Here we can add themes later or any TUI related options
 }

 type Permissions struct {
-	AllowedTools []string `json:"allowed_tools,omitempty"` // Tools that don't require permission prompts
+	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"description=List of tools that don't require permission prompts,example=bash,example=view"` // Tools that don't require permission prompts
 	SkipRequests bool `json:"-"` // Automatically accept all permissions (YOLO mode)
 }

 type Options struct {
-	ContextPaths []string `json:"context_paths,omitempty"`
-	TUI *TUIOptions `json:"tui,omitempty"`
-	Debug bool `json:"debug,omitempty"`
-	DebugLSP bool `json:"debug_lsp,omitempty"`
-	DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty"`
-	DataDirectory string `json:"data_directory,omitempty"` // Relative to the cwd
+	ContextPaths []string `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
+	TUI *TUIOptions `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
+	Debug bool `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
+	DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
+	DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
+	DataDirectory string `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
 }

 type MCPs map[string]MCPConfig
@@ -241,18 +262,18 @@ type Agent struct {
 // Config holds the configuration for crush.
 type Config struct {
 	// We currently only support large/small as values here.
-	Models map[SelectedModelType]SelectedModel `json:"models,omitempty"`
+	Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`

 	// The providers that are configured
-	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty"`
+	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty" jsonschema:"description=AI provider configurations"`

-	MCP MCPs `json:"mcp,omitempty"`
+	MCP MCPs `json:"mcp,omitempty" jsonschema:"description=Model Context Protocol server configurations"`

-	LSP LSPs `json:"lsp,omitempty"`
+	LSP LSPs `json:"lsp,omitempty" jsonschema:"description=Language Server Protocol configurations"`

-	Options *Options `json:"options,omitempty"`
+	Options *Options `json:"options,omitempty" jsonschema:"description=General application options"`

-	Permissions *Permissions `json:"permissions,omitempty"`
+	Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`

 	// Internal
 	workingDir string `json:"-"`
```
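
These `jsonschema` struct tags are what populate the descriptions, enums, examples, and defaults in the generated `.github/crush-schema.json`. A minimal sketch of the mechanism, using a toy struct rather than one from the codebase, and the same invopop/jsonschema reflector the schema command uses:

```go
// Minimal sketch of how invopop/jsonschema turns struct tags into schema
// fields. Example is a hypothetical type, not part of the crush config.
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/invopop/jsonschema"
)

type Example struct {
	Command string `json:"command" jsonschema:"description=Command to execute,example=gopls"`
	Debug   bool   `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
}

func main() {
	// Reflect the struct into a JSON schema, then print it.
	schema := (&jsonschema.Reflector{}).Reflect(&Example{})
	out, err := json.MarshalIndent(schema, "", "  ")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
	// The output contains a $defs/Example definition whose "command" property
	// carries the description and example from the tag, and whose "debug"
	// property carries the default, mirroring how the crush schema is built.
}
```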