8
n8n 中文网 amn8n.com

使用Llama 3.2、RAG和搜索构建本地AI助手(Ollama和MCP)

中级

这是一个Support Chatbot, AI RAG领域的自动化工作流,包含 9 个节点。主要使用 McpClientTool, Agent, ChatTrigger, LmChatOllama, MemoryBufferWindow 等节点。 使用Llama 3.2、RAG和搜索构建本地AI助手(Ollama和MCP)

前置要求
  • 需要本地运行 Ollama 服务(并配置 Ollama API 凭证),以及可用的 MCP 客户端凭证(如 Bright Data MCP),导入后请检查并更新凭证设置
工作流预览
可视化展示节点连接关系,支持缩放和平移
导出工作流
复制以下 JSON 配置到 n8n 导入,即可使用此工作流
{
  "meta": {
    "instanceId": "558d88703fb65b2d0e44613bc35916258b0f0bf983c5d4730c00c424b77ca36a",
    "templateCredsSetupCompleted": true
  },
  "nodes": [
    {
      "id": "cb29d79a-40dc-4077-8810-45c695229609",
      "name": "当收到聊天消息时",
      "type": "@n8n/n8n-nodes-langchain.chatTrigger",
      "position": [
        0,
        0
      ],
      "webhookId": "349f7ccf-6700-42b1-8137-fdde62c4bdfa",
      "parameters": {
        "options": {}
      },
      "typeVersion": 1.1
    },
    {
      "id": "c090dbad-f2dc-4db5-84c0-0f96ab5fb922",
      "name": "AI Agent",
      "type": "@n8n/n8n-nodes-langchain.agent",
      "position": [
        220,
        0
      ],
      "parameters": {
        "options": {
          "systemMessage": "You are a helpful assistant. You have access to two MCP Servers. One which has access to a RAG Database, and one which has access to a tool to search google.\n\nWhen you get a question about current events, you use the search engine MCP Server to fetch information.\n\nWhen people ask more general questions, you use your RAG Database"
        }
      },
      "typeVersion": 2
    },
    {
      "id": "8688fdd5-7cb6-4d45-bda5-39c31f24ffcd",
      "name": "Ollama 聊天模型",
      "type": "@n8n/n8n-nodes-langchain.lmChatOllama",
      "position": [
        -120,
        240
      ],
      "parameters": {
        "options": {}
      },
      "credentials": {
        "ollamaApi": {
          "id": "xHuYe0MDGOs9IpBW",
          "name": "Local Ollama service"
        }
      },
      "typeVersion": 1
    },
    {
      "id": "2db79ee1-07c3-49bc-863a-129066a4c758",
      "name": "简单记忆",
      "type": "@n8n/n8n-nodes-langchain.memoryBufferWindow",
      "position": [
        60,
        300
      ],
      "parameters": {},
      "typeVersion": 1.3
    },
    {
      "id": "c82053cf-99f2-40cb-8c07-eac3f199f7b2",
      "name": "MCP客户端:RAG",
      "type": "@n8n/n8n-nodes-langchain.mcpClientTool",
      "position": [
        360,
        300
      ],
      "parameters": {
        "sseEndpoint": "http://localhost:5678/mcp/8d7910ab-f0db-4042-9da9-f580647a8a8e"
      },
      "typeVersion": 1
    },
    {
      "id": "020a7821-5c15-48cb-a5bd-1d3251130b80",
      "name": "便签",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        280,
        220
      ],
      "parameters": {
        "color": 3,
        "height": 240,
        "content": "## MCP客户端:RAG"
      },
      "typeVersion": 1
    },
    {
      "id": "3d0b7c23-18f4-49ec-bb2a-4192959250e8",
      "name": "便签1",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        600,
        220
      ],
      "parameters": {
        "color": 6,
        "width": 400,
        "height": 240,
        "content": "## MCP客户端:Bright Data"
      },
      "typeVersion": 1
    },
    {
      "id": "c41ee2cf-73ef-4f38-bc0b-36ff5091d18c",
      "name": "MCP客户端:BD_工具",
      "type": "n8n-nodes-mcp.mcpClientTool",
      "position": [
        680,
        300
      ],
      "parameters": {},
      "credentials": {
        "mcpClientApi": {
          "id": "gtICJ1VBUVNpQahr",
          "name": "MCP Client (STDIO) account"
        }
      },
      "typeVersion": 1
    },
    {
      "id": "85a32e4d-02c0-4811-a33d-3d77f4705f35",
      "name": "MCP客户端:BD_执行",
      "type": "n8n-nodes-mcp.mcpClientTool",
      "position": [
        840,
        300
      ],
      "parameters": {
        "toolName": "search_engine",
        "operation": "executeTool",
        "toolParameters": "={{ /*n8n-auto-generated-fromAI-override*/ $fromAI('Tool_Parameters', ``, 'json') }}"
      },
      "credentials": {
        "mcpClientApi": {
          "id": "gtICJ1VBUVNpQahr",
          "name": "MCP Client (STDIO) account"
        }
      },
      "typeVersion": 1
    }
  ],
  "pinData": {},
  "connections": {
    "简单记忆": {
      "ai_memory": [
        [
          {
            "node": "AI Agent",
            "type": "ai_memory",
            "index": 0
          }
        ]
      ]
    },
    "MCP客户端:RAG": {
      "ai_tool": [
        [
          {
            "node": "AI Agent",
            "type": "ai_tool",
            "index": 0
          }
        ]
      ]
    },
    "Ollama 聊天模型": {
      "ai_languageModel": [
        [
          {
            "node": "AI Agent",
            "type": "ai_languageModel",
            "index": 0
          }
        ]
      ]
    },
    "MCP客户端:BD_工具": {
      "ai_tool": [
        [
          {
            "node": "AI Agent",
            "type": "ai_tool",
            "index": 0
          }
        ]
      ]
    },
    "MCP客户端:BD_执行": {
      "ai_tool": [
        [
          {
            "node": "AI Agent",
            "type": "ai_tool",
            "index": 0
          }
        ]
      ]
    },
    "当收到聊天消息时": {
      "main": [
        [
          {
            "node": "AI Agent",
            "type": "main",
            "index": 0
          }
        ]
      ]
    }
  }
}
常见问题

如何使用这个工作流?

复制上方的 JSON 配置代码,在您的 n8n 实例中创建新工作流并选择「从 JSON 导入」,粘贴配置后根据需要修改凭证设置即可。

这个工作流适合什么场景?

中级 - 客服机器人, AI RAG 检索增强

需要付费吗?

本工作流完全免费,您可以直接导入使用。本工作流的语言模型通过本地 Ollama 运行,无需付费;但工作流中使用的第三方服务(如 Bright Data 的 MCP 搜索服务)可能需要您自行付费。

工作流信息
难度等级
中级
节点数量9
分类2
节点类型7
难度说明

适合有一定经验的用户,包含 6-15 个节点的中等复杂度工作流

外部链接
在 n8n.io 查看

分享此工作流