mirror of https://github.com/charmbracelet/crush.git
synced 2025-08-02 05:20:46 +03:00
Merge pull request #270 from charmbracelet/comments
chore: lint and tidy comments
@@ -53,6 +53,7 @@ type App struct {
 	cleanupFuncs []func()
 }
 
+// New initializes a new applcation instance.
 func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
 	q := db.New(conn)
 	sessions := session.NewService(q)
@@ -78,10 +79,10 @@ func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
 
 	app.setupEvents()
 
-	// Initialize LSP clients in the background
+	// Initialize LSP clients in the background.
 	go app.initLSPClients(ctx)
 
-	// TODO: remove the concept of agent config most likely
+	// TODO: remove the concept of agent config, most likely.
 	if cfg.IsConfigured() {
 		if err := app.InitCoderAgent(); err != nil {
 			return nil, fmt.Errorf("failed to initialize coder agent: %w", err)
@@ -97,20 +98,22 @@ func (app *App) Config() *config.Config {
 	return app.config
 }
 
-// RunNonInteractive handles the execution flow when a prompt is provided via CLI flag.
+// RunNonInteractive handles the execution flow when a prompt is provided via
+// CLI flag.
 func (app *App) RunNonInteractive(ctx context.Context, prompt string, quiet bool) error {
 	slog.Info("Running in non-interactive mode")
 
 	ctx, cancel := context.WithCancel(ctx)
 	defer cancel()
 
-	// Start spinner if not in quiet mode
+	// Start spinner if not in quiet mode.
 	var spinner *format.Spinner
 	if !quiet {
 		spinner = format.NewSpinner(ctx, cancel, "Generating")
 		spinner.Start()
 	}
-	// Helper function to stop spinner once
+
+	// Helper function to stop spinner once.
 	stopSpinner := func() {
 		if !quiet && spinner != nil {
 			spinner.Stop()
@@ -257,9 +260,10 @@ func (app *App) InitCoderAgent() error {
 	return nil
 }
 
+// Subscribe sends events to the TUI as tea.Msgs.
 func (app *App) Subscribe(program *tea.Program) {
 	defer log.RecoverPanic("app.Subscribe", func() {
-		slog.Info("TUI subscription panic - attempting graceful shutdown")
+		slog.Info("TUI subscription panic: attempting graceful shutdown")
 		program.Quit()
 	})
 
@@ -271,6 +275,7 @@ func (app *App) Subscribe(program *tea.Program) {
 		app.tuiWG.Wait()
 	})
 	defer app.tuiWG.Done()
+
 	for {
 		select {
 		case <-tuiCtx.Done():
@@ -286,23 +291,28 @@ func (app *App) Subscribe(program *tea.Program) {
 	}
 }
 
-// Shutdown performs a clean shutdown of the application
+// Shutdown performs a graceful shutdown of the application.
 func (app *App) Shutdown() {
 	if app.CoderAgent != nil {
 		app.CoderAgent.CancelAll()
 	}
 
 	app.cancelFuncsMutex.Lock()
 	for _, cancel := range app.watcherCancelFuncs {
 		cancel()
 	}
 	app.cancelFuncsMutex.Unlock()
+
+	// Wait for all LSP watchers to finish.
 	app.lspWatcherWG.Wait()
 
+	// Get all LSP clients.
 	app.clientsMutex.RLock()
 	clients := make(map[string]*lsp.Client, len(app.LSPClients))
 	maps.Copy(clients, app.LSPClients)
 	app.clientsMutex.RUnlock()
+
+	// Shutdown all LSP clients.
 	for name, client := range clients {
 		shutdownCtx, cancel := context.WithTimeout(app.globalCtx, 5*time.Second)
 		if err := client.Shutdown(shutdownCtx); err != nil {
@@ -311,6 +321,7 @@ func (app *App) Shutdown() {
 		cancel()
 	}
 
+	// Call call cleanup functions.
 	for _, cleanup := range app.cleanupFuncs {
 		if cleanup != nil {
 			cleanup()
@@ -10,10 +10,9 @@ import (
 	"github.com/charmbracelet/crush/internal/lsp/watcher"
 )
 
+// initLSPClients initializes LSP clients.
 func (app *App) initLSPClients(ctx context.Context) {
-	// Initialize LSP clients
 	for name, clientConfig := range app.config.LSP {
-		// Start each client initialization in its own goroutine
 		go app.createAndStartLSPClient(ctx, name, clientConfig.Command, clientConfig.Args...)
 	}
 	slog.Info("LSP clients initialization started in background")
@@ -21,68 +20,68 @@ func (app *App) initLSPClients(ctx context.Context) {
 
 // createAndStartLSPClient creates a new LSP client, initializes it, and starts its workspace watcher
 func (app *App) createAndStartLSPClient(ctx context.Context, name string, command string, args ...string) {
-	// Create a specific context for initialization with a timeout
 	slog.Info("Creating LSP client", "name", name, "command", command, "args", args)
 
-	// Create the LSP client
+	// Create LSP client.
 	lspClient, err := lsp.NewClient(ctx, command, args...)
 	if err != nil {
 		slog.Error("Failed to create LSP client for", name, err)
 		return
 	}
 
-	// Create a longer timeout for initialization (some servers take time to start)
+	// Increase initialization timeout as some servers take more time to start.
 	initCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
 	defer cancel()
 
-	// Initialize with the initialization context
+	// Initialize LSP client.
 	_, err = lspClient.InitializeLSPClient(initCtx, app.config.WorkingDir())
 	if err != nil {
 		slog.Error("Initialize failed", "name", name, "error", err)
 		// Clean up the client to prevent resource leaks
 		lspClient.Close()
 		return
 	}
 
-	// Wait for the server to be ready
+	// Wait for the server to be ready.
 	if err := lspClient.WaitForServerReady(initCtx); err != nil {
 		slog.Error("Server failed to become ready", "name", name, "error", err)
-		// We'll continue anyway, as some functionality might still work
+		// Server never reached a ready state, but let's continue anyway, as
+		// some functionality might still work.
 		lspClient.SetServerState(lsp.StateError)
 	} else {
+		// Server reached a ready state scuccessfully.
 		slog.Info("LSP server is ready", "name", name)
 		lspClient.SetServerState(lsp.StateReady)
 	}
 
 	slog.Info("LSP client initialized", "name", name)
 
-	// Create a child context that can be canceled when the app is shutting down
+	// Create a child context that can be canceled when the app is shutting
+	// down.
 	watchCtx, cancelFunc := context.WithCancel(ctx)
 
-	// Create the workspace watcher
+	// Create the workspace watcher.
 	workspaceWatcher := watcher.NewWorkspaceWatcher(name, lspClient)
 
-	// Store the cancel function to be called during cleanup
+	// Store the cancel function to be called during cleanup.
 	app.cancelFuncsMutex.Lock()
 	app.watcherCancelFuncs = append(app.watcherCancelFuncs, cancelFunc)
 	app.cancelFuncsMutex.Unlock()
 
-	// Add the watcher to a WaitGroup to track active goroutines
-	app.lspWatcherWG.Add(1)
-
-	// Add to map with mutex protection before starting goroutine
 	app.clientsMutex.Lock()
 	app.LSPClients[name] = lspClient
 	app.clientsMutex.Unlock()
 
+	// Run workspace watcher.
+	app.lspWatcherWG.Add(1)
 	go app.runWorkspaceWatcher(watchCtx, name, workspaceWatcher)
 }
 
-// runWorkspaceWatcher executes the workspace watcher for an LSP client
+// runWorkspaceWatcher executes the workspace watcher for an LSP client.
 func (app *App) runWorkspaceWatcher(ctx context.Context, name string, workspaceWatcher *watcher.WorkspaceWatcher) {
 	defer app.lspWatcherWG.Done()
 	defer log.RecoverPanic("LSP-"+name, func() {
-		// Try to restart the client
+		// Try to restart the client.
 		app.restartLSPClient(ctx, name)
 	})
 
@@ -90,31 +89,32 @@ func (app *App) runWorkspaceWatcher(ctx context.Context, name string, workspaceW
 	slog.Info("Workspace watcher stopped", "client", name)
 }
 
-// restartLSPClient attempts to restart a crashed or failed LSP client
+// restartLSPClient attempts to restart a crashed or failed LSP client.
 func (app *App) restartLSPClient(ctx context.Context, name string) {
-	// Get the original configuration
+	// Get the original configuration.
 	clientConfig, exists := app.config.LSP[name]
 	if !exists {
 		slog.Error("Cannot restart client, configuration not found", "client", name)
 		return
 	}
 
-	// Clean up the old client if it exists
+	// Clean up the old client if it exists.
 	app.clientsMutex.Lock()
 	oldClient, exists := app.LSPClients[name]
 	if exists {
-		delete(app.LSPClients, name) // Remove from map before potentially slow shutdown
+		// Remove from map before potentially slow shutdown.
+		delete(app.LSPClients, name)
 	}
 	app.clientsMutex.Unlock()
 
 	if exists && oldClient != nil {
-		// Try to shut it down gracefully, but don't block on errors
+		// Try to shut down client gracefully, but don't block on errors.
 		shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
 		_ = oldClient.Shutdown(shutdownCtx)
 		cancel()
 	}
 
-	// Create a new client using the shared function
+	// Create a new client using the shared function.
 	app.createAndStartLSPClient(ctx, name, clientConfig.Command, clientConfig.Args...)
 	slog.Info("Successfully restarted LSP client", "client", name)
 }
@@ -48,6 +48,7 @@ to assist developers in writing, debugging, and understanding code directly from
 `,
 	RunE: func(cmd *cobra.Command, args []string) error {
 		// Load the config
+		// XXX: Handle errors.
 		debug, _ := cmd.Flags().GetBool("debug")
 		cwd, _ := cmd.Flags().GetString("cwd")
 		prompt, _ := cmd.Flags().GetString("prompt")
@@ -76,7 +77,7 @@ to assist developers in writing, debugging, and understanding code directly from
 
 		ctx := cmd.Context()
 
-		// Connect DB, this will also run migrations
+		// Connect to DB; this will also run migrations.
 		conn, err := db.Connect(ctx, cfg.Options.DataDirectory)
 		if err != nil {
 			return err
@@ -95,13 +96,13 @@ to assist developers in writing, debugging, and understanding code directly from
 			return err
 		}
 
-		// Non-interactive mode
+		// Non-interactive mode.
 		if prompt != "" {
 			// Run non-interactive flow using the App method
 			return app.RunNonInteractive(ctx, prompt, quiet)
 		}
 
-		// Set up the TUI
+		// Set up the TUI.
 		program := tea.NewProgram(
 			tui.New(app),
 			tea.WithAltScreen(),
@@ -254,9 +254,9 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 		return a, tea.Batch(cmds...)
 	case splash.OnboardingCompleteMsg:
 		a.isConfigured = config.HasInitialDataConfig()
-		updated, cmd := a.pages[a.currentPage].Update(msg)
+		updated, pageCmd := a.pages[a.currentPage].Update(msg)
 		a.pages[a.currentPage] = updated.(util.Model)
-		cmds = append(cmds, cmd)
+		cmds = append(cmds, pageCmd)
 		return a, tea.Batch(cmds...)
 	// Key Press Messages
 	case tea.KeyPressMsg:
@@ -268,9 +268,9 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 			a.dialog = u.(dialogs.DialogCmp)
 			cmds = append(cmds, dialogCmd)
 		} else {
-			updated, cmd := a.pages[a.currentPage].Update(msg)
+			updated, pageCmd := a.pages[a.currentPage].Update(msg)
 			a.pages[a.currentPage] = updated.(util.Model)
-			cmds = append(cmds, cmd)
+			cmds = append(cmds, pageCmd)
 		}
 		return a, tea.Batch(cmds...)
 	}