cmd

package
v0.0.0-...-b6677e7 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Dec 28, 2025 License: MIT Imports: 34 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

View Source
// Build metadata. The placeholder values below are overwritten from the
// main package at build time (see "Version information - set from main
// package").
var (
	Version   = "dev"     // release version; "dev" for local builds
	BuildTime = "unknown" // time the binary was built
	GitCommit = "unknown" // git commit the binary was built from
)

Version information - set from main package

View Source
// ChatCmd represents the unified chat command: it parses the chat
// configuration from flags/args, optionally prints a startup banner,
// and hands control to the chat service.
var ChatCmd = &cobra.Command{
	Use:   "chat",
	Short: "Enter interactive chat mode with the LLM",
	Long: `Chat mode provides a conversational interface with the LLM and is the primary way to interact with the client.
The LLM can execute queries, access data, and leverage other capabilities provided by the server.

This command uses the modern interface-based approach for LLM providers, supporting all configured
provider types including OpenAI, Anthropic, Ollama, and others.`,
	RunE: func(cmd *cobra.Command, args []string) error {
		// Resolve the full chat configuration (servers, provider, model).
		cfg := parseChatConfig(cmd, args)

		// Print the startup banner only when the output manager allows it
		// (e.g. not in quiet mode).
		if mgr := output.GetGlobalManager(); mgr.ShouldShowStartupInfo() {
			names := strings.Join(cfg.ServerNames, ", ")
			if names == "" {
				names = "none"
			}
			color.New(color.Bold).Printf("Starting chat mode with servers: %s, provider: %s, model: %s\n\n",
				names, cfg.ProviderName, cfg.ModelName)
		}

		// Delegate the interactive session to the chat service.
		return chat.NewService().StartChat(cfg)
	},
}

ChatCmd represents the unified chat command

View Source
// ConfigCmd represents the parent "config" command. It has no RunE of
// its own; it only groups configuration subcommands such as
// ConfigValidateCmd.
var ConfigCmd = &cobra.Command{
	Use:   "config",
	Short: "Configuration management commands",
	Long: `Manage mcp-cli configuration files.

Available subcommands:
  validate - Validate configuration file and check for security issues

Examples:
  mcp-cli config validate
  mcp-cli config validate --config custom-config.yaml`,
}

ConfigCmd represents the config command

View Source
// ConfigValidateCmd validates the configuration file: it checks syntax
// and required fields via the config service, then scans AI and
// embedding provider entries for hardcoded API keys and prints security
// guidance when any are found.
var ConfigValidateCmd = &cobra.Command{
	Use:   "validate",
	Short: "Validate configuration file",
	Long: `Validates the configuration file for:
- Syntax errors
- Missing required fields
- Exposed API keys (security check)
- Template validation

Examples:
  mcp-cli config validate
  mcp-cli config validate --config custom-config.json`,
	RunE: func(cmd *cobra.Command, args []string) error {
		fmt.Println("Validating configuration...")

		// Load and structurally validate the configuration file.
		configService := config.NewService()
		appConfig, err := configService.LoadConfig(configFile)
		if err != nil {
			fmt.Printf("❌ Failed to load config: %v\n", err)
			return err
		}

		if err := configService.ValidateConfig(appConfig); err != nil {
			fmt.Printf("❌ Configuration validation failed: %v\n", err)
			return err
		}

		fmt.Println("✓ Configuration syntax is valid")

		// Security scan: flag provider API keys that look hardcoded
		// rather than referenced via environment variables.
		hasExposedKeys := false

		if appConfig.AI != nil && appConfig.AI.Interfaces != nil {
			for interfaceType, interfaceConfig := range appConfig.AI.Interfaces {
				for providerName, providerConfig := range interfaceConfig.Providers {
					if isExposedKey(providerConfig.APIKey) {
						fmt.Printf("⚠️  Warning: API key for %s/%s appears to be hardcoded\n",
							interfaceType, providerName)
						// Suggest the conventional env var name: uppercase,
						// hyphens replaced (e.g. "azure-openai" ->
						// AZURE_OPENAI_API_KEY), matching the variables the
						// query command actually reads.
						envVar := strings.ToUpper(strings.ReplaceAll(providerName, "-", "_")) + "_API_KEY"
						fmt.Println("   Consider moving to .env file: " + envVar)
						hasExposedKeys = true
					}
				}
			}
		}

		if appConfig.Embeddings != nil && appConfig.Embeddings.Interfaces != nil {
			for interfaceType, interfaceConfig := range appConfig.Embeddings.Interfaces {
				for providerName, providerConfig := range interfaceConfig.Providers {
					if isExposedKey(providerConfig.APIKey) {
						fmt.Printf("⚠️  Warning: Embedding API key for %s/%s appears to be hardcoded\n",
							interfaceType, providerName)
						fmt.Println("   Consider moving to .env file")
						hasExposedKeys = true
					}
				}
			}
		}

		if hasExposedKeys {
			fmt.Println("\n💡 Security Tip:")
			fmt.Println("   1. Create a .env file: cp .env.example .env")
			fmt.Println("   2. Add your keys: OPENAI_API_KEY=sk-...")
			fmt.Println("   3. Update config: \"api_key\": \"${OPENAI_API_KEY}\"")
			fmt.Println("   4. Add .env to .gitignore (already done)")
		} else {
			fmt.Println("✓ No exposed API keys found")
		}

		// Encourage use of a .env file for keys.
		envPath := ".env"
		if _, err := os.Stat(envPath); os.IsNotExist(err) {
			fmt.Println("\n💡 Tip: Create a .env file for API keys")
			fmt.Println("   cp .env.example .env")
		} else {
			fmt.Println("✓ .env file found")
		}

		fmt.Println("\n✅ Configuration is valid!")

		if hasExposedKeys {
			fmt.Println("\n⚠️  However, you should move hardcoded API keys to .env file for security")
			// NOTE(review): os.Exit here skips deferred cleanup and makes
			// this path untestable; consider returning a sentinel error and
			// letting the caller decide the exit code.
			os.Exit(1)
		}

		return nil
	},
}

ConfigValidateCmd validates the configuration file

View Source
// EmbeddingsCmd represents the embeddings command. It accepts text from
// a positional argument, stdin, or a file; all execution logic lives in
// executeEmbeddings.
var EmbeddingsCmd = &cobra.Command{
	Use:   "embeddings [text]",
	Short: "Generate vector embeddings from text input",
	Long: `Generate vector embeddings from text input using various embedding models.

Supports multiple input sources:
- Direct text argument
- Standard input (stdin)
- File input

Text is automatically chunked using configurable strategies to handle
large inputs and optimize embedding quality.

Examples:
  # Basic usage with stdin
  echo "Your text here" | mcp-cli embeddings
  
  # File input with specific model
  mcp-cli embeddings --input-file document.txt --model text-embedding-3-large
  
  # Advanced chunking and output
  mcp-cli embeddings --chunk-strategy sentence --max-chunk-size 512 --output-format json --overlap 50
  
  # Direct text input
  mcp-cli embeddings "Analyze this specific text"
  
  # Show available models and strategies
  mcp-cli embeddings --show-models
  mcp-cli embeddings --show-strategies`,
	RunE: executeEmbeddings,
}

EmbeddingsCmd represents the embeddings command

View Source
// InitCmd initializes a new mcp-cli configuration via an interactive
// wizard; the implementation lives in runInit.
var InitCmd = &cobra.Command{
	Use:   "init",
	Short: "Initialize mcp-cli configuration",
	Long: `Interactive setup wizard for mcp-cli configuration.

Creates a modular configuration structure with separate directories for
providers, embeddings, servers, and templates.

Modes:
  --quick     Quick setup with minimal questions (uses ollama, no API keys)
  --full      Full setup with all configuration options
  (default)   Standard interactive setup

Examples:
  mcp-cli init              # Interactive setup
  mcp-cli init --quick      # Quick setup (ollama only)
  mcp-cli init --full       # Complete setup wizard`,
	RunE: runInit,
}

InitCmd initializes a new mcp-cli configuration

View Source
// InteractiveCmd represents the interactive command: a slash-command
// driven session against the configured MCP servers.
var InteractiveCmd = &cobra.Command{
	Use:   "interactive",
	Short: "Enter interactive mode with slash commands",
	Long: `Interactive mode provides a command-line interface with slash commands for direct interaction with the server.
You can query server information, list available tools and resources, and more.`,
	RunE: func(cmd *cobra.Command, args []string) error {
		// Resolve the actual server list from flags and config.
		serverNames, userSpecified := host.ProcessOptions(configFile, serverName, disableFilesystem, providerName, modelName)

		// Show the resolved server names (not the raw --server flag value),
		// with a "none" fallback, matching the chat command's banner.
		serversText := strings.Join(serverNames, ", ")
		if serversText == "" {
			serversText = "none"
		}
		bold := color.New(color.Bold)
		bold.Printf("Starting interactive mode with servers: %s, provider: %s, model: %s\n\n", serversText, providerName, modelName)

		logging.Info("Starting interactive mode")

		err := host.RunCommand(runInteractiveMode, configFile, serverNames, userSpecified)
		if err != nil {
			// Log and surface the error on stderr, then propagate to cobra.
			logging.Error("Error in interactive mode: %v", err)
			fmt.Fprintf(os.Stderr, "Error in interactive mode: %v\n", err)
			return err
		}

		return nil
	},
}

InteractiveCmd represents the interactive command

View Source
// QueryCmd represents the query command: a one-shot, non-interactive
// question to the configured AI provider, optionally with MCP server
// tool access, context files, and JSON/file output. With --error-code
// it exits with a numeric code instead of returning an error.
var QueryCmd = &cobra.Command{
	Use:   "query [question]",
	Short: "Ask a single question and get a response",
	Long: `Query mode asks a single question to the AI model and returns a response
without entering an interactive session. Perfect for scripting, automation,
and integration with other tools.

The query command supports:
  • Multiple MCP servers for tool access
  • Context from files (--context)
  • Custom system prompts (--system-prompt)
  • JSON output for parsing (--json)
  • Raw tool data output (--raw-data)
  • File output (--output)

Examples:
  # Basic query
  mcp-cli query "What is the current time?"
  
  # With specific servers and provider
  mcp-cli query --server filesystem,brave-search \
    --provider openai --model gpt-4o \
    "Search for MCP information and summarize"
  
  # With context file
  mcp-cli query --context context.txt \
    --system-prompt "You are a coding assistant" \
    "How do I implement a binary tree in Go?"
  
  # JSON output for parsing
  mcp-cli query --json "List the top 5 cloud providers" > results.json
  
  # Verbose mode (show all operations)
  mcp-cli query --noisy "What files are in this directory?"
  
  # Raw tool data (bypass AI summarization)
  mcp-cli query --raw-data "Show latest security incidents"
  
  # Output to file
  mcp-cli query "Analyze this code" --output analysis.txt`,
	Args: cobra.MinimumNArgs(1),
	RunE: func(cmd *cobra.Command, args []string) error {
		// --noisy raises the log level to INFO when --verbose is not set.
		if noisy && !verbose {

			logging.SetDefaultLevel(logging.INFO)
			logging.Info("Noisy mode enabled for query command")
		}

		// All positional args together form the question text.
		question := strings.Join(args, " ")

		// Resolve server names from flags/config.
		serverNames, userSpecified := ProcessOptions(configFile, serverName, disableFilesystem, providerName, modelName)
		logging.Debug("Server names: %v", serverNames)
		logging.Debug("Using provider from config: %s", providerName)

		// Load provider/model/key/endpoint details from the config.
		enhancedAIOptions, err := host.GetEnhancedAIOptions(configFile, providerName, modelName)
		if err != nil {
			if errorCodeOnly {
				os.Exit(query.ErrConfigNotFoundCode)
			}
			return fmt.Errorf("error loading enhanced AI options: %w", err)
		}

		// Map the enhanced options onto the host AIOptions shape.
		aiOptions := &host.AIOptions{
			Provider:      enhancedAIOptions.Provider,
			Model:         enhancedAIOptions.Model,
			APIKey:        enhancedAIOptions.APIKey,
			APIEndpoint:   enhancedAIOptions.APIEndpoint,
			InterfaceType: enhancedAIOptions.Interface,
		}

		// Explicit command-line flags override config-derived values.
		if providerName != "" {
			aiOptions.Provider = providerName
			enhancedAIOptions.Provider = providerName
		}
		if modelName != "" {
			aiOptions.Model = modelName
			enhancedAIOptions.Model = modelName
		}

		// Providers that authenticate without an API key (cloud-native
		// credentials); ollama is handled separately below.
		providersWithoutAPIKey := map[string]bool{
			"bedrock":   true,
			"vertex-ai": true,
		}

		if !providersWithoutAPIKey[aiOptions.Provider] && aiOptions.Provider != "ollama" && aiOptions.APIKey == "" {
			// Try provider-specific environment variables
			var envKey string
			switch aiOptions.Provider {
			case "openai":
				envKey = os.Getenv("OPENAI_API_KEY")
			case "anthropic":
				envKey = os.Getenv("ANTHROPIC_API_KEY")
			case "gemini":
				envKey = os.Getenv("GEMINI_API_KEY")
			case "deepseek":
				envKey = os.Getenv("DEEPSEEK_API_KEY")
			case "openrouter":
				envKey = os.Getenv("OPENROUTER_API_KEY")
			case "azure-openai":
				envKey = os.Getenv("AZURE_OPENAI_API_KEY")
			}

			if envKey != "" {
				aiOptions.APIKey = envKey
			} else if aiOptions.APIKey == "" {
				// NOTE(review): this condition is always true here (the outer
				// branch already established APIKey == ""); a plain else would
				// be equivalent.
				if errorCodeOnly {
					os.Exit(query.ErrProviderNotFoundCode)
				}
				return fmt.Errorf("missing API key for %s", aiOptions.Provider)
			}
		}

		// Load context file if provided
		var contextContent string
		if contextFile != "" {
			// NOTE(review): ioutil.ReadFile is deprecated since Go 1.16;
			// os.ReadFile is the drop-in replacement.
			content, err := ioutil.ReadFile(contextFile)
			if err != nil {
				if errorCodeOnly {
					os.Exit(query.ErrContextNotFoundCode)
				}
				return fmt.Errorf("failed to read context file: %w", err)
			}
			contextContent = string(content)
		}

		// Load the configuration file to get various settings including max follow-up attempts
		var maxToolFollowUp int
		oldCfg, err := config.LoadConfig(configFile)
		if err == nil {
			// Get the maximum tool follow-up attempts from configuration
			// Determine the primary server name for configuration lookup
			var primaryServerName string
			if len(serverNames) == 1 {
				primaryServerName = serverNames[0]
			}

			maxToolFollowUp = oldCfg.GetMaxToolFollowUp(primaryServerName)
			logging.Debug("Using max tool follow-up attempts from config: %d", maxToolFollowUp)

			// System prompt resolution order: --system-prompt flag, then the
			// single server's configured prompt, then the AI default prompt.
			if systemPrompt == "" {

				if len(serverNames) == 1 {
					configPrompt := oldCfg.GetSystemPrompt(serverNames[0])
					if configPrompt != "" {
						systemPrompt = configPrompt
						logging.Debug("Using system prompt from config for server: %s", serverNames[0])
					}
				}

				if systemPrompt == "" {
					if oldCfg.AI != nil && oldCfg.AI.DefaultSystemPrompt != "" {
						systemPrompt = oldCfg.AI.DefaultSystemPrompt
						logging.Debug("Using default system prompt from config")
					}
				}
			}
		} else {
			// Config unavailable: fall back to a fixed follow-up limit.
			maxToolFollowUp = 2
			logging.Debug("Config loading failed, using default max tool follow-up attempts: %d", maxToolFollowUp)
		}

		// Collect raw-data overrides: a global setting forces --raw-data on;
		// per-server settings are recorded for the post-processing step below.
		serverRawDataOverride := make(map[string]bool)
		if oldCfg != nil {

			settings := oldCfg.GetSettings()
			if settings != nil && settings.RawDataOverride {
				rawDataOutput = true
				logging.Debug("Raw data output enabled from global settings")
			}

			for _, name := range serverNames {
				serverSettings, err := oldCfg.GetServerSettings(name)
				if err == nil && serverSettings != nil && serverSettings.RawDataOverride {
					serverRawDataOverride[name] = true
					logging.Debug("Raw data output enabled for server: %s", name)
				}
			}
		}

		// ARCHITECTURAL FIX: Choose command options based on verbosity for clean output
		var commandOptions *host.CommandOptions
		if noisy || verbose {

			commandOptions = host.DefaultCommandOptions()
		} else {

			commandOptions = host.QuietCommandOptions()
		}

		// Run the query command with the given options
		var result *query.QueryResult
		err = host.RunCommandWithOptions(func(conns []*host.ServerConnection) error {
			// Initialize the LLM provider for this query.
			aiService := ai.NewService()
			llmProvider, err := aiService.InitializeProvider(configFile, providerName, modelName)
			if err != nil {
				if errorCodeOnly {
					os.Exit(query.ErrInitializationCode)
				}
				return fmt.Errorf("failed to initialize AI provider: %w", err)
			}

			// Build the query handler around the live server connections.
			handler, err := query.NewQueryHandlerWithProvider(conns, llmProvider, aiOptions, systemPrompt)
			if err != nil {
				if errorCodeOnly {
					os.Exit(query.ErrInitializationCode)
				}
				return fmt.Errorf("failed to initialize query: %w", err)
			}

			handler.SetMaxFollowUpAttempts(maxToolFollowUp)

			if contextContent != "" {
				handler.AddContext(contextContent)
			}

			if maxTokens > 0 {
				handler.SetMaxTokens(maxTokens)
			}

			// Execute the question; result is captured in the enclosing scope.
			result, err = handler.Execute(question)
			if err != nil {
				// With --error-code, map the failure to a numeric exit code.
				if errorCodeOnly {
					exitCode := query.GetExitCode(err)
					os.Exit(exitCode)
				}
				return fmt.Errorf("query failed: %w", err)
			}

			return nil
		}, configFile, serverNames, userSpecified, commandOptions)

		if err != nil {
			return err
		}

		// Raw-data mode: if requested globally or by any involved server,
		// replace the AI-summarized response with the raw tool output.
		if result != nil && len(result.ToolCalls) > 0 {

			applyRawDataOutput := rawDataOutput

			for _, conn := range result.ServerConnections {
				if serverRawDataOverride[conn] {
					applyRawDataOutput = true
					break
				}
			}

			if applyRawDataOutput {
				rawData := extractRawData(result.ToolCalls)
				if rawData != "" {
					// Keep the summarized response if no raw data was extracted.
					result.Response = rawData
				}
			}
		}

		// Emit the result: JSON or plain text, to a file or stdout.
		if result != nil {
			if jsonOutput {
				// Pretty-print the whole result structure as JSON.
				jsonData, err := json.MarshalIndent(result, "", "  ")
				if err != nil {
					if errorCodeOnly {
						os.Exit(query.ErrOutputFormatCode)
					}
					return fmt.Errorf("failed to format JSON response: %w", err)
				}

				if outputFile != "" {
					// NOTE(review): ioutil.WriteFile is deprecated; os.WriteFile
					// is the drop-in replacement.
					err = ioutil.WriteFile(outputFile, jsonData, 0644)
					if err != nil {
						if errorCodeOnly {
							os.Exit(query.ErrOutputWriteCode)
						}
						return fmt.Errorf("failed to write output file: %w", err)
					}
				} else {
					fmt.Println(string(jsonData))
				}
			} else {
				// Plain-text output path.
				if outputFile != "" {
					err = ioutil.WriteFile(outputFile, []byte(result.Response), 0644)
					if err != nil {
						if errorCodeOnly {
							os.Exit(query.ErrOutputWriteCode)
						}
						return fmt.Errorf("failed to write output file: %w", err)
					}
				} else {
					// Write to stdout through the output writer abstraction.
					writer := output.NewWriter()
					defer writer.Close()
					writer.Println(result.Response)
				}
			}
		}

		return nil
	},
}

QueryCmd represents the query command

View Source
var (

	// RootCmd represents the base command when called without any subcommands.
	// With no subcommand it runs a template (--template), lists templates
	// (--list-templates), or falls through to chat mode.
	RootCmd = &cobra.Command{
		Use:   "mcp-cli",
		Short: "MCP Command-Line Tool - Interact with AI models and MCP servers",
		Long: `================================================================================
                          MCP Command-Line Tool
          Protocol-level CLI for Model Context Provider servers
================================================================================

A versatile command-line interface for interacting with AI models through the
Model Context Protocol (MCP). Supports multiple AI providers, workflow templates,
embeddings generation, and can run as an MCP server itself.

+----------------------------------------------------------------------------+
| First Time Setup                                                           |
+----------------------------------------------------------------------------+
| mcp-cli init --quick         Quick setup (30 seconds)                     |
| mcp-cli init                 Interactive guided setup                     |
| mcp-cli init --full          Complete setup with all options              |
+----------------------------------------------------------------------------+

+----------------------------------------------------------------------------+
| Basic Usage                                                                |
+----------------------------------------------------------------------------+
| mcp-cli                      Start interactive chat (default)             |
| mcp-cli chat                 Explicitly start chat mode                   |
| mcp-cli query "question"     Ask a single question                        |
| mcp-cli interactive          Interactive mode with slash commands         |
+----------------------------------------------------------------------------+

+----------------------------------------------------------------------------+
| Workflow Templates                                                         |
+----------------------------------------------------------------------------+
| Templates chain multiple AI requests with different providers and pass    |
| data between steps for complex, automated workflows.                      |
|                                                                            |
| mcp-cli --list-templates                List available templates          |
| mcp-cli --template analyze              Run 'analyze' template            |
| mcp-cli --template analyze --input-data "data"  With input data           |
| echo "data" | mcp-cli --template analyze        From stdin                |
+----------------------------------------------------------------------------+

+----------------------------------------------------------------------------+
| MCP Server Mode                                                            |
+----------------------------------------------------------------------------+
| Run mcp-cli as an MCP server, exposing workflow templates as callable     |
| tools that other applications (like Claude Desktop) can use.              |
|                                                                            |
| mcp-cli serve config/runas/agent.yaml   Start MCP server                  |
| mcp-cli serve --verbose agent.yaml      With detailed logging             |
+----------------------------------------------------------------------------+

+----------------------------------------------------------------------------+
| Embeddings & Vector Search                                                 |
+----------------------------------------------------------------------------+
| mcp-cli embeddings "text"                    Generate embeddings          |
| mcp-cli embeddings --input-file doc.txt      From file                    |
| mcp-cli embeddings --model text-embedding-3-large  Specific model         |
| echo "text" | mcp-cli embeddings             From stdin                   |
+----------------------------------------------------------------------------+

+----------------------------------------------------------------------------+
| Configuration                                                              |
+----------------------------------------------------------------------------+
| mcp-cli config validate                      Validate configuration       |
| mcp-cli config --help                        See all config commands      |
+----------------------------------------------------------------------------+`,
		PersistentPreRun: func(cmd *cobra.Command, args []string) {
			// Commands that work without an existing config file skip all
			// of the setup below.
			cmdName := cmd.Name()
			if cmdName == "init" || cmdName == "help" || cmdName == "completion" || cmdName == "serve" {
				return
			}

			checkConfigExists(configFile)

			// Determine output configuration based on command and flags
			var outputConfig *models.OutputConfig

			isQueryCommand := cmd.Name() == "query"
			isTemplateMode := templateName != ""
			isEmbeddingsCommand := cmd.Name() == "embeddings"

			if verbose {
				// --verbose wins over everything else.
				outputConfig = models.NewVerboseOutputConfig()
			} else if isQueryCommand || isTemplateMode || isEmbeddingsCommand {
				// Scriptable commands default to quiet output.
				outputConfig = models.NewQuietOutputConfig()
			} else {

				outputConfig = models.NewDefaultOutputConfig()
			}

			if noColor {
				outputConfig.ShowColors = false
			}

			// Install the output manager globally and sync the legacy
			// logging system with the chosen output configuration.
			outputManager := output.NewManager(outputConfig)
			output.SetGlobalManager(outputManager)

			configureLegacyLogging(outputConfig)

			// Fill in the provider from config when no --provider flag given.
			if providerName == "" {
				configService := config.NewService()
				if appConfig, err := configService.LoadConfig(configFile); err == nil {
					if appConfig.AI != nil && appConfig.AI.DefaultProvider != "" {
						providerName = appConfig.AI.DefaultProvider
						logging.Debug("Using default provider from config: %s", providerName)
					}
				}
			}
		},

		Run: func(cmd *cobra.Command, args []string) {
			// --list-templates: print templates and exit.
			if listTemplates {
				if err := executeListTemplates(); err != nil {
					logging.Error("Failed to list templates: %v", err)
					os.Exit(1)
				}
				return
			}

			// --template NAME: run the named workflow template and exit.
			if templateName != "" {
				if err := executeTemplate(); err != nil {
					logging.Error("Template execution failed: %v", err)
					os.Exit(1)
				}
				return
			}

			// Default behavior: fall through to interactive chat mode.
			if err := ChatCmd.RunE(cmd, args); err != nil {
				os.Exit(1)
			}
		},
	}
)
View Source
// ServeCmd represents the serve command: it runs mcp-cli as an MCP
// server over stdio, exposing workflow templates from a "runas" config
// as callable MCP tools.
var ServeCmd = &cobra.Command{
	Use:   "serve [runas-config]",
	Short: "Run as an MCP server exposing workflow templates as tools",
	Long: `Serve mode runs mcp-cli as an MCP server, exposing your workflow templates
as callable MCP tools that other applications can use.

This allows applications like Claude Desktop, IDEs, or other MCP clients to:
  • Execute your custom workflow templates as tools
  • Chain multiple AI operations together
  • Access your configured AI providers and MCP servers

The serve command requires a "runas" configuration file that defines:
  • Server name and version
  • Which templates to expose as tools
  • Input/output mappings for each tool
  • Optional provider/model overrides

Example usage:
  # Start MCP server with specific config
  mcp-cli serve config/runas/research_agent.yaml
  
  # With verbose logging for debugging
  mcp-cli serve --verbose config/runas/code_reviewer.yaml
  
  # Using the --serve flag
  mcp-cli --serve config/runas/data_analyst.yaml

Claude Desktop Configuration:
  Add to your Claude Desktop config (claude_desktop_config.json):
  
  {
    "mcpServers": {
      "research-agent": {
        "command": "/path/to/mcp-cli",
        "args": ["serve", "/path/to/config/runas/research_agent.yaml"]
      }
    }
  }`,
	Args: cobra.MaximumNArgs(1),
	RunE: func(cmd *cobra.Command, args []string) error {
		// The runas config path comes from the positional argument,
		// falling back to the --serve flag value.
		cfgPath := serveConfig
		if len(args) > 0 {
			cfgPath = args[0]
		}
		if cfgPath == "" {
			return fmt.Errorf("runas config file is required")
		}

		// Suppress non-error logging unless --verbose was given.
		if !verbose {
			logging.SetDefaultLevel(logging.ERROR)
		}

		logging.Info("Starting MCP server mode with config: %s", cfgPath)

		// Load the runas config; LoadOrDefault reports via its second
		// result whether it just created an example file.
		loader := runas.NewLoader()
		rc, created, err := loader.LoadOrDefault(cfgPath)
		if err != nil {
			return fmt.Errorf("failed to load runas config: %w", err)
		}
		if created {
			// A fresh example was written; tell the user and stop.
			fmt.Fprintf(os.Stderr, "Created example runas config at: %s\n", cfgPath)
			fmt.Fprintf(os.Stderr, "Please edit the file to configure your MCP server.\n")
			return nil
		}

		logging.Info("Loaded runas config: %s", rc.ServerInfo.Name)

		// When the default "config.yaml" is in effect, resolve it next
		// to the executable instead of the current working directory.
		appCfgPath := configFile
		if appCfgPath == "config.yaml" {
			exePath, err := os.Executable()
			if err != nil {
				return fmt.Errorf("failed to determine executable path: %w", err)
			}
			exeDir := filepath.Dir(exePath)
			appCfgPath = filepath.Join(exeDir, "config.yaml")
			logging.Info("Using config file: %s", appCfgPath)
		}

		cfgSvc := infraConfig.NewService()
		appCfg, err := cfgSvc.LoadConfig(appCfgPath)
		if err != nil {
			return fmt.Errorf("failed to load application config from %s: %w", appCfgPath, err)
		}

		// Fail fast if any exposed tool references a template missing
		// from both the v1 and v2 template sets.
		for i, tool := range rc.Tools {
			_, inV1 := appCfg.Templates[tool.Template]
			_, inV2 := appCfg.TemplatesV2[tool.Template]
			if !inV1 && !inV2 {
				return fmt.Errorf("tool %d (%s) references unknown template: %s",
					i, tool.Name, tool.Template)
			}
		}

		// Wire the server service and serve MCP over stdio; Start blocks
		// until the server stops.
		svc := serverService.NewService(rc, appCfg, cfgSvc)
		srv := server.NewStdioServer(svc)

		logging.Info("MCP server starting...")
		if err := srv.Start(); err != nil {
			return fmt.Errorf("server error: %w", err)
		}
		return nil
	},
}

ServeCmd represents the serve command

View Source
// VersionCmd represents the version command; it reports the build
// metadata held in the Version, BuildTime, and GitCommit package
// variables.
var VersionCmd = &cobra.Command{
	Use:   "version",
	Short: "Print version information",
	Long:  `Print detailed version information including build time and git commit.`,
	Run: func(cmd *cobra.Command, args []string) {
		// Emit each metadata line in a fixed order.
		lines := []struct {
			format string
			value  string
		}{
			{"mcp-cli version %s\n", Version},
			{"Built: %s\n", BuildTime},
			{"Commit: %s\n", GitCommit},
		}
		for _, l := range lines {
			fmt.Printf(l.format, l.value)
		}
	},
}

VersionCmd represents the version command

Functions

func Execute

func Execute() error

Execute adds all child commands to the root command and sets flags appropriately.

func ProcessOptions

func ProcessOptions(configFile, serverFlag string, disableFilesystem bool, provider string, model string) ([]string, map[string]bool)

ProcessOptions processes command-line options and returns the server names

Types

type HostServerAdapter

type HostServerAdapter struct {
	// contains filtered or unexported fields
}

HostServerAdapter adapts host.ServerConnection to domain.MCPServer interface

func (*HostServerAdapter) ExecuteTool

func (hsa *HostServerAdapter) ExecuteTool(ctx context.Context, toolName string, arguments map[string]interface{}) (string, error)

func (*HostServerAdapter) GetConfig

func (hsa *HostServerAdapter) GetConfig() *config.ServerConfig

func (*HostServerAdapter) GetServerName

func (hsa *HostServerAdapter) GetServerName() string

func (*HostServerAdapter) GetTools

func (hsa *HostServerAdapter) GetTools() ([]domain.Tool, error)

func (*HostServerAdapter) IsRunning

func (hsa *HostServerAdapter) IsRunning() bool

func (*HostServerAdapter) Start

func (hsa *HostServerAdapter) Start(ctx context.Context) error

func (*HostServerAdapter) Stop

func (hsa *HostServerAdapter) Stop() error

type HostServerManager

type HostServerManager struct {
	// contains filtered or unexported fields
}

HostServerManager adapts host.ServerConnection to domain.MCPServerManager interface

func NewHostServerManager

func NewHostServerManager(connections []*host.ServerConnection) *HostServerManager

func (*HostServerManager) ExecuteTool

func (hsm *HostServerManager) ExecuteTool(ctx context.Context, toolName string, arguments map[string]interface{}) (string, error)

func (*HostServerManager) GetAvailableTools

func (hsm *HostServerManager) GetAvailableTools() ([]domain.Tool, error)

func (*HostServerManager) GetServer

func (hsm *HostServerManager) GetServer(serverName string) (domain.MCPServer, bool)

func (*HostServerManager) ListServers

func (hsm *HostServerManager) ListServers() map[string]domain.MCPServer

func (*HostServerManager) StartServer

func (hsm *HostServerManager) StartServer(ctx context.Context, serverName string, cfg *config.ServerConfig) (domain.MCPServer, error)

func (*HostServerManager) StopAll

func (hsm *HostServerManager) StopAll() error

func (*HostServerManager) StopServer

func (hsm *HostServerManager) StopServer(serverName string) error

type InitConfig

// InitConfig holds the configuration choices gathered by the init
// wizard: which providers and MCP servers to set up, and which
// provider becomes the default.
type InitConfig struct {
	Providers           []string // names of the selected providers
	Servers             []string // names of the selected MCP servers
	IncludeOllama       bool
	IncludeOpenAI       bool
	IncludeAnthropic    bool
	IncludeDeepSeek     bool
	IncludeGemini       bool
	IncludeOpenRouter   bool
	IncludeLMStudio     bool
	IncludeBedrock      bool
	IncludeAzureFoundry bool
	IncludeVertexAI     bool
	DefaultProvider     string // provider used when none is specified
}

InitConfig holds configuration choices

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL