| LlmClient | ↔ mcp_client |
|---|---|
| ↕ | ↕ |
| LLM Provider (Claude, GPT) | MCP Server (Các công cụ, tài nguyên) |
Tạo file `.env` tại thư mục gốc chứa:

```
CLAUDE_API_KEY=your-claude-api-key
MCP_SERVER_URL=http://localhost:8999/sse
MCP_AUTH_TOKEN=your-auth-token
```

Cập nhật `pubspec.yaml` để nhận diện file `.env`:

```yaml
flutter:
  assets:
    - .env
```
import 'package:mcp_llm/mcp_llm.dart';import 'package:mcp_client/mcp_client.dart' as mcp;
/// Wires an LLM provider (Claude) to an MCP server over SSE.
///
/// The connection settings are overridable named parameters (defaults are
/// the tutorial placeholders) so real credentials — e.g. loaded from the
/// `.env` file described above — can be injected instead of being
/// hard-coded. Calling `setupIntegration()` with no arguments behaves
/// exactly like the original snippet.
Future<void> setupIntegration({
  String serverUrl = 'http://localhost:8999/sse',
  String authToken = 'your_token',
  String apiKey = 'your-claude-api-key',
  String model = 'claude-3-haiku-20240307',
}) async {
  // Register the Claude provider factory with the LLM manager.
  final mcpLlm = McpLlm();
  mcpLlm.registerProvider('claude', ClaudeProviderFactory());

  // MCP client that exposes tools/resources to the LLM.
  final mcpClient = mcp.McpClient.createClient(
    name: 'flutter_app',
    version: '1.0.0',
    capabilities: mcp.ClientCapabilities(
      roots: true,
      rootsListChanged: true,
      sampling: true,
    ),
  );

  // SSE transport with bearer-token authentication.
  final transport = await mcp.McpClient.createSseTransport(
    serverUrl: serverUrl,
    headers: {'Authorization': 'Bearer $authToken'},
  );

  // Retry transient connection failures before giving up.
  await mcpClient.connectWithRetry(
    transport,
    maxRetries: 3,
    delay: const Duration(seconds: 2),
  );

  // Bind the Claude provider to the connected MCP client.
  final llmClient = await mcpLlm.createClient(
    providerName: 'claude',
    config: LlmConfiguration(apiKey: apiKey, model: model),
    mcpClient: mcpClient,
    systemPrompt: 'You are a helpful assistant with access to various tools.',
  );
  // Use llmClient for AI interaction and tool calls.
}
// Listen for MCP connection-state change notifications.
mcpClient.onNotification('connection_state_changed', (params) {
  final state = params['state'] as String;
  print('MCP connection state: $state');
  // Update the UI or run logic when the connection state changes.
});
/// Prints every tool exposed by the connected MCP server.
Future<void> listAvailableTools() async {
  final available = await mcpClient.listTools();
  print('Available tools:');
  for (final t in available) {
    print('- ${t.name}: ${t.description}');
  }
}
/// Asks the LLM a question with automatic tool invocation enabled.
Future<void> chatWithToolUse() async {
  const question = "What's the weather in New York today?";
  final response = await llmClient.chat(question, enableTools: true);
  print('AI Response: ${response.text}');
}
/// Invokes the 'weather' tool directly, bypassing the LLM entirely.
Future<void> executeToolDirectly() async {
  final args = {'location': 'New York', 'unit': 'celsius'};
  try {
    final result = await llmClient.executeTool('weather', args);
    print('Tool execution result: $result');
  } catch (e) {
    print('Tool execution error: $e');
  }
}
/// Streams a chat response, printing the accumulated text as each
/// chunk arrives and noting when the stream completes.
Future<void> streamChatWithToolUse() async {
  final chunks = llmClient.streamChat(
    "Tell me the weather in New York and San Francisco",
    enableTools: true,
  );
  final buffer = StringBuffer();
  await for (final chunk in chunks) {
    // Accumulate non-empty text chunks into the running response.
    final text = chunk['textChunk'];
    if (text != null && text.isNotEmpty) {
      buffer.write(text);
      print('Current response: ${buffer.toString()}');
    }
    if (chunk['isDone'] == true) {
      print('Response stream completed');
    }
  }
}
/// Prints each resource (name, description, URI) exposed by the MCP server.
Future<void> listAvailableResources() async {
  final found = await mcpClient.listResources();
  print('Available resources:');
  for (final r in found) {
    print('- ${r.name}: ${r.description}');
    print(' URI: ${r.uri}');
  }
}
/// Reads the 'company_data' resource and asks the LLM to analyze it.
Future<void> readResource() async {
  try {
    final resourceContent = await mcpClient.readResource('company_data');
    print('Resource content: $resourceContent');
    // Feed the raw resource content to the LLM for analysis.
    final analysis = await llmClient.chat(
      "Analyze this company data: $resourceContent",
    );
    print('AI Analysis: ${analysis.text}');
  } catch (e) {
    print('Resource reading error: $e');
  }
}
/// Resolves a templated resource URI on the MCP server.
///
/// Fix: the template URI was garbled to `files://project/(unknown)` in the
/// original snippet; the `{'filename': ...}` argument map shows the intended
/// template is `files://project/{filename}`, with the placeholder filled
/// from the arguments.
Future<void> getResourceWithTemplate() async {
  try {
    final result = await mcpClient.getResourceWithTemplate(
      'files://project/{filename}',
      {'filename': 'config.json'},
    );
    print('Template resource result: $result');
  } catch (e) {
    print('Template resource error: $e');
  }
}
// Create an LLM client backed by MULTIPLE MCP clients — one dedicated to
// tools and one to resources — instead of the single-client setup above.
final llmClient = await mcpLlm.createClient(
  providerName: 'claude',
  config: LlmConfiguration(
    apiKey: 'your-claude-api-key',
    model: 'claude-3-haiku-20240307',
  ),
  // Each MCP client is keyed by a role name.
  // NOTE(review): toolsClient / resourcesClient must be created beforehand.
  mcpClients: {
    'tools': toolsClient,
    'resources': resourcesClient,
  },
  systemPrompt: 'You are a helpful assistant with access to various tools and resources.',
);
Đoạn mã dài ở phần trên đảm nhận việc kết nối và thao tác giữa `LlmClient` và `mcp_client`. Các endpoint liên quan phía máy chủ là `/stream` và `/tool`.