Skip to content

Commit f6dfcc9

Browse files
committed
update
1 parent c5fc081 commit f6dfcc9

File tree

2 files changed

+269
-15
lines changed

2 files changed

+269
-15
lines changed

README.md

+110
Original file line numberDiff line numberDiff line change
@@ -352,6 +352,116 @@ Once started, you can access the interface by opening `http://localhost:41999` (
352352
<img src=".github/resources/web-interface.png" alt="MCP Web Interface" width="700">
353353
</p>
354354

355+
### LLM Integration
356+
357+
MCP Tools includes a powerful LLM integration that enables AI models to interact with MCP servers through natural language. The LLM command creates an interactive chat session where AI can discover and use tools from one or more MCP servers on your behalf.
358+
359+
```bash
360+
# Basic usage with a single MCP server
361+
mcp llm -- npx -y @modelcontextprotocol/server-filesystem ~
362+
363+
# Use multiple servers for expanded capabilities
364+
mcp llm -M "npx -y @modelcontextprotocol/server-filesystem ~" -M "https://ne.tools"
365+
366+
# Specify a provider and model
367+
mcp llm --provider anthropic --model claude-3-opus-20240229
368+
```
369+
370+
#### Features
371+
372+
- **Multi-Provider Support**: Works with major LLM providers:
373+
- OpenAI (default) - Uses API key from `OPENAI_API_KEY`
374+
- Anthropic - Uses API key from `ANTHROPIC_API_KEY`
375+
376+
- **Multiple Server Integration**: Connect up to 3 MCP servers simultaneously:
377+
```bash
378+
mcp llm -M "server1" -M "server2" -M "server3"
379+
```
380+
Tools are automatically prefixed with server IDs (s1_, s2_, s3_) to avoid naming conflicts.
381+
382+
- **Tool Execution**: LLMs can:
383+
- Discover available tools across all connected servers
384+
- Call tools with proper parameters
385+
- Receive and process tool results
386+
- Make multiple tool calls in a single turn
387+
388+
- **Server Aliases**: Use server aliases for simpler commands:
389+
```bash
390+
# Using aliases for servers
391+
mcp llm -M fs-server -M github-server
392+
```
393+
394+
#### Example Session
395+
396+
```
397+
mcp > MCP LLM Shell
398+
mcp > Connecting to server 1: npx -y @modelcontextprotocol/server-filesystem ~
399+
mcp > Server 1: Registered 8 tools
400+
mcp > Using provider: openai, model: gpt-4o
401+
mcp > Total registered tools: 8
402+
mcp > Type 'exit' to quit
403+
404+
user > What files are in my current directory?
405+
406+
agent > I'll check the files in your current directory.
407+
408+
[Calling s1_list_dir]
409+
mcp > [Server 1 running list_dir with params {"path":"."}]
410+
{
411+
"entries": [
412+
{
413+
"name": "README.md",
414+
"type": "file",
415+
"size": 12345,
416+
"modTime": "2023-05-01T12:34:56Z"
417+
},
418+
{
419+
"name": "src",
420+
"type": "directory"
421+
}
422+
]
423+
}
424+
425+
In your current directory, you have:
426+
1. README.md (file, 12.1 KB)
427+
2. src (directory)
428+
429+
user > Show me the contents of README.md
430+
431+
agent > I'll show you the contents of README.md.
432+
433+
[Calling s1_read_file]
434+
mcp > [Server 1 running read_file with params {"path":"README.md"}]
435+
{
436+
"content": "# My Project\nThis is a sample README file."
437+
}
438+
439+
Here's the content of README.md:
440+
441+
# My Project
442+
This is a sample README file.
443+
```
444+
445+
#### Configuration Options
446+
447+
```bash
448+
# Provider selection
449+
mcp llm --provider openai # Default
450+
mcp llm --provider anthropic
451+
452+
# Model selection
453+
mcp llm --model gpt-4o # Default for OpenAI
454+
mcp llm --model claude-3-7-sonnet-20250219  # Default for Anthropic
455+
456+
# API key override (otherwise uses environment variables)
457+
mcp llm --api-key "your-api-key-here"
458+
459+
# Display options
460+
mcp llm --no-color # Disable colored output
461+
```
462+
463+
The LLM integration is designed to make AI-driven workflow automation with MCP tools intuitive and powerful, allowing models to perform complex tasks by combining available tools through natural language requests.
464+
355465
### Project Scaffolding
356466

357467
MCP Tools provides a scaffolding feature to quickly create new MCP servers with TypeScript:

cmd/mcptools/commands/llm.go

+159-15
Original file line numberDiff line numberDiff line change
@@ -59,19 +59,27 @@ func LLMCmd() *cobra.Command {
5959
var noColorFlag bool
6060

6161
cmd := &cobra.Command{
62-
Use: "llm [command args...]",
62+
Use: "llm [-- command args...]",
6363
Short: "Start an interactive shell with LLM integration",
6464
Long: `Start an interactive shell with LLM integration.
6565
This command connects to an LLM provider and provides a chat interface.
6666
The LLM can execute MCP tools on your behalf.
6767
6868
Example usage:
69-
mcp llm npx -y @modelcontextprotocol/server-filesystem ~
69+
mcp llm -- npx -y @modelcontextprotocol/server-filesystem ~
7070
mcp llm -M https://ne.tools -M "npx -y @modelcontextprotocol/server-filesystem ~"
71-
mcp llm --provider anthropic --model claude-3-opus-20240229`,
72-
DisableFlagParsing: false,
71+
mcp llm --provider anthropic --model claude-3-7-sonnet-20250219
72+
73+
Note: When specifying a server command directly, use -- to separate MCP flags from the server command.`,
74+
DisableFlagParsing: true,
7375
SilenceUsage: true,
7476
RunE: func(thisCmd *cobra.Command, args []string) error {
77+
// Process the args manually to separate flags from positional args
78+
processedArgs, err := processArgs(args, &providerFlag, &modelFlag, &apiKeyFlag, &multiServerFlags, &noColorFlag)
79+
if err != nil {
80+
return err
81+
}
82+
7583
// If no-color flag is set, disable colors
7684
if noColorFlag {
7785
color.NoColor = true
@@ -98,15 +106,15 @@ Example usage:
98106
serverCommands = append(serverCommands, client.ParseCommandString(serverFlag))
99107
}
100108
}
101-
} else if len(args) > 0 {
109+
} else if len(processedArgs) > 0 {
102110
// Legacy mode - use positional args for the server command
103111
// Check if it's an alias
104-
if aliasCmd, found := alias.GetServerCommand(args[0]); found && len(args) == 1 {
112+
if aliasCmd, found := alias.GetServerCommand(processedArgs[0]); found && len(processedArgs) == 1 {
105113
// It's an alias, use the command from the alias
106114
serverCommands = append(serverCommands, client.ParseCommandString(aliasCmd))
107115
} else {
108116
// Not an alias, use the args directly
109-
serverCommands = append(serverCommands, args)
117+
serverCommands = append(serverCommands, processedArgs)
110118
}
111119
}
112120

@@ -128,7 +136,7 @@ Example usage:
128136
case LLMProviderOpenAI:
129137
model = "gpt-4o"
130138
case LLMProviderAnthropic:
131-
model = "claude-3-opus-20240229"
139+
model = "claude-3-7-sonnet-20250219"
132140
}
133141
}
134142

@@ -304,12 +312,6 @@ Be concise, accurate, and helpful.`, len(serverCommands)),
304312
},
305313
}
306314

307-
cmd.Flags().StringVar(&providerFlag, "provider", "", "LLM provider (openai, anthropic, mistral)")
308-
cmd.Flags().StringVar(&modelFlag, "model", "", "The model to use")
309-
cmd.Flags().StringVar(&apiKeyFlag, "api-key", "", "API key for the LLM provider")
310-
cmd.Flags().StringArrayVarP(&multiServerFlags, "multi", "M", nil, "Multiple server commands to connect to (max 3)")
311-
cmd.Flags().BoolVar(&noColorFlag, "no-color", false, "Disable colored output")
312-
313315
return cmd
314316
}
315317

@@ -890,13 +892,46 @@ func callAnthropicWithTools(model, apiKey string, messages []map[string]interfac
890892
// Format tools for Anthropic
891893
anthropicTools := []map[string]interface{}{}
892894
for _, tool := range tools {
895+
// Skip tools without parameters for Anthropic
896+
if tool.Parameters == nil {
897+
serverColor.Fprintf(color.Output, "Skipping tool %s for Anthropic (no parameters)\n", tool.Name)
898+
continue
899+
}
900+
901+
// Verify we have properties to work with
902+
props, hasProps := tool.Parameters["properties"].(map[string]interface{})
903+
if !hasProps || len(props) == 0 {
904+
serverColor.Fprintf(color.Output, "Skipping tool %s for Anthropic (no properties)\n", tool.Name)
905+
continue
906+
}
907+
908+
// Format the parameters specifically for Anthropic's expected schema structure
909+
inputSchema := map[string]interface{}{
910+
"type": "object",
911+
"properties": props,
912+
}
913+
914+
// Add required field if present
915+
if required, ok := tool.Parameters["required"].([]string); ok && len(required) > 0 {
916+
inputSchema["required"] = required
917+
}
918+
893919
anthropicTools = append(anthropicTools, map[string]interface{}{
894920
"name": tool.Name,
895921
"description": tool.Description,
896-
"input_schema": tool.Parameters,
922+
"input_schema": inputSchema,
897923
})
898924
}
899925

926+
// Note how many tools we included
927+
serverColor.Fprintf(color.Output, "Registered %d tools for Anthropic (out of %d total)\n", len(anthropicTools), len(tools))
928+
929+
// Debug: Log the first tool format if any exist
930+
if len(anthropicTools) > 0 {
931+
debugBytes, _ := json.MarshalIndent(anthropicTools[0], "", " ")
932+
serverColor.Fprintf(color.Output, "Sample Anthropic tool format: %s\n", string(debugBytes))
933+
}
934+
900935
// Prepare request
901936
requestData := map[string]interface{}{
902937
"model": model,
@@ -1213,3 +1248,112 @@ func convertMessages(messages []map[string]interface{}) []map[string]interface{}
12131248
}
12141249
return convertedMessages
12151250
}
1251+
1252+
// processArgs manually processes the arguments to separate flags from positional args
1253+
func processArgs(args []string, providerFlag, modelFlag, apiKeyFlag *string, multiServerFlags *[]string, noColorFlag *bool) ([]string, error) {
1254+
var processedArgs []string
1255+
1256+
// Check for -- separator
1257+
dashDashIndex := -1
1258+
for i, arg := range args {
1259+
if arg == "--" {
1260+
dashDashIndex = i
1261+
break
1262+
}
1263+
}
1264+
1265+
// If -- separator is found, treat everything after it as positional args
1266+
if dashDashIndex != -1 {
1267+
// Process flags before the separator
1268+
flagArgs := args[:dashDashIndex]
1269+
if err := parseFlags(flagArgs, providerFlag, modelFlag, apiKeyFlag, multiServerFlags, noColorFlag); err != nil {
1270+
return nil, err
1271+
}
1272+
1273+
// Everything after -- is treated as server command
1274+
if dashDashIndex+1 < len(args) {
1275+
processedArgs = args[dashDashIndex+1:]
1276+
}
1277+
return processedArgs, nil
1278+
}
1279+
1280+
// No -- separator, try to auto-detect flags vs positional args
1281+
var flagArgs []string
1282+
i := 0
1283+
for i < len(args) {
1284+
arg := args[i]
1285+
1286+
// If argument starts with - or --, it's a flag
1287+
if strings.HasPrefix(arg, "-") {
1288+
flagArgs = append(flagArgs, arg)
1289+
1290+
// Check if the flag needs a value
1291+
if arg == "-M" || arg == "--multi" ||
1292+
arg == "--provider" || arg == "--model" ||
1293+
arg == "--api-key" {
1294+
if i+1 < len(args) {
1295+
flagArgs = append(flagArgs, args[i+1])
1296+
i += 2
1297+
continue
1298+
}
1299+
}
1300+
i++
1301+
continue
1302+
}
1303+
1304+
// If we reach here, it's not a flag, so it must be the start of the server command
1305+
break
1306+
}
1307+
1308+
// Parse the flags we've collected
1309+
if err := parseFlags(flagArgs, providerFlag, modelFlag, apiKeyFlag, multiServerFlags, noColorFlag); err != nil {
1310+
return nil, err
1311+
}
1312+
1313+
// Everything else is treated as the server command
1314+
if i < len(args) {
1315+
processedArgs = args[i:]
1316+
}
1317+
1318+
return processedArgs, nil
1319+
}
1320+
1321+
// parseFlags parses the collected flag arguments and stores the values in the
// supplied destinations. Both "--flag value" and "--flag=value" forms are
// accepted. Unknown flags are silently ignored, matching the original manual
// parser: cobra's own flag parsing is disabled for this command, so stray
// flags (e.g. --help) may pass through here and must not abort the session.
// Returns an error when a value-taking flag has no value.
func parseFlags(flagArgs []string, providerFlag, modelFlag, apiKeyFlag *string, multiServerFlags *[]string, noColorFlag *bool) error {
	for i := 0; i < len(flagArgs); i++ {
		// Split "--flag=value" into name and inline value, if present.
		name := flagArgs[i]
		inline := ""
		hasInline := false
		if eq := strings.IndexByte(name, '='); eq >= 0 {
			name, inline, hasInline = name[:eq], name[eq+1:], true
		}

		switch name {
		case "--no-color":
			// Boolean flag: takes no value. An inline value (e.g.
			// "--no-color=x") is not a recognized form and is ignored,
			// preserving the original parser's behavior.
			if !hasInline {
				*noColorFlag = true
			}
			continue
		case "--provider", "--model", "--api-key", "-M", "--multi":
			// Value-taking flag: its value is resolved below.
		default:
			// Unknown flag: ignore.
			continue
		}

		// Resolve the flag's value: prefer the inline "--flag=value" form,
		// otherwise consume the next argument.
		var value string
		switch {
		case hasInline:
			value = inline
		case i+1 < len(flagArgs):
			i++
			value = flagArgs[i]
		default:
			// Report the long form for -M to match the original message.
			if name == "-M" {
				name = "--multi"
			}
			return fmt.Errorf("%s requires a value", name)
		}

		switch name {
		case "--provider":
			*providerFlag = value
		case "--model":
			*modelFlag = value
		case "--api-key":
			*apiKeyFlag = value
		case "-M", "--multi":
			*multiServerFlags = append(*multiServerFlags, value)
		}
	}

	return nil
}

0 commit comments

Comments
 (0)