{
  "name": "Compare sequential, agent-based, and parallel LLM processing with Claude 3.7",
  "nodes": [
    {
      "id": "35e53ce7-06b4-47ca-b7f3-b147bd059fcf",
      "name": "When clicking ‘Test workflow’",
      "type": "n8n-nodes-base.manualTrigger",
      "position": [
        200,
        520
      ]
    },
    {
      "id": "aeef734e-1c3b-4a91-93ae-2ae9c50951b8",
      "name": "HTTP Request",
      "type": "n8n-nodes-base.httpRequest",
      "position": [
        400,
        520
      ]
    },
    {
      "id": "7f6b95eb-df8c-4f0f-ba69-6b298d624ccd",
      "name": "Markdown",
      "type": "n8n-nodes-base.markdown",
      "position": [
        600,
        520
      ]
    },
    {
      "id": "994dbe06-4c25-4fb3-a8f3-566eb5b66c6d",
      "name": "Sticky Note",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        160,
        340
      ],
      "parameters": {
        "width": 700,
        "height": 360,
        "content": "# Connect to one of the blue sections -->\n## This can be anything:\n- Chat input\n- Trigger from external system\n- CRON-scheduled event"
      }
    },
    {
      "id": "8ba3039d-dabf-43b7-ab35-117332f65ced",
      "name": "Anthropic Chat Model",
      "type": "@n8n/n8n-nodes-langchain.lmChatAnthropic",
      "position": [
        1460,
        -20
      ]
    },
    {
      "id": "7e1da020-e01d-410c-aa7f-a19d6e1c368d",
      "name": "Anthropic Chat Model1",
      "type": "@n8n/n8n-nodes-langchain.lmChatAnthropic",
      "position": [
        1820,
        -20
      ]
    },
    {
      "id": "620503cb-2d51-4102-8975-75255cf15b1b",
      "name": "Anthropic Chat Model2",
      "type": "@n8n/n8n-nodes-langchain.lmChatAnthropic",
      "position": [
        2180,
        -20
      ]
    },
    {
      "id": "5f0d11ce-c1ea-4c36-8b2d-d3f70b19f0ba",
      "name": "Anthropic Chat Model3",
      "type": "@n8n/n8n-nodes-langchain.lmChatAnthropic",
      "position": [
        2540,
        -20
      ]
    },
    {
      "id": "f973d01e-fad7-4143-8379-54438f5412cb",
      "name": "Merge",
      "type": "n8n-nodes-base.merge",
      "position": [
        2440,
        360
      ]
    },
    {
      "id": "c7e58b90-bc96-421c-88f2-4e9f95f87248",
      "name": "Simple Memory",
      "type": "@n8n/n8n-nodes-langchain.memoryBufferWindow",
      "position": [
        2680,
        780
      ]
    },
    {
      "id": "0e606f7c-2cdb-4e34-8c0b-2303996077fb",
      "name": "Clean memory",
      "type": "@n8n/n8n-nodes-langchain.memoryManager",
      "position": [
        1500,
        480
      ]
    },
    {
      "id": "af0fb574-9964-4f7d-8348-a2cf614b8562",
      "name": "Initial prompts",
      "type": "n8n-nodes-base.set",
      "position": [
        1880,
        480
      ]
    },
    {
      "id": "6743e44a-cc76-4e73-b4f3-ba7c65d179d3",
      "name": "Split Out",
      "type": "n8n-nodes-base.splitOut",
      "position": [
        2240,
        480
      ]
    },
    {
      "id": "caddd26c-ee84-455f-8ee6-aecf21536930",
      "name": "Reshape",
      "type": "n8n-nodes-base.set",
      "position": [
        2060,
        480
      ]
    },
    {
      "id": "bd244988-d074-42f3-af42-960e5aa1d35d",
      "name": "Sticky Note1",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        1840,
        400
      ],
      "parameters": {
        "width": 540,
        "height": 240,
        "content": "# An array of prompts here"
      }
    },
    {
      "id": "7e9e5287-8d4e-43a9-b8cf-ae26a177bfbb",
      "name": "Anthropic Chat Model4",
      "type": "@n8n/n8n-nodes-langchain.lmChatAnthropic",
      "position": [
        2600,
        600
      ]
    },
    {
      "id": "47816a45-b906-47ef-9510-c63867bfc8b7",
      "name": "Merge2",
      "type": "n8n-nodes-base.merge",
      "position": [
        1860,
        1120
      ]
    },
    {
      "id": "e63b89a1-c2ca-4ed2-ae50-e3a7b429609c",
      "name": "Sticky Note2",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        2040,
        1020
      ],
      "parameters": {
        "width": 320,
        "height": 520,
        "content": "## Make sure URL matches\n### ⚠️ Cloud users!\nReplace `{{ $env.WEBHOOK_URL }}` \nwith your n8n instance URL"
      }
    },
    {
      "id": "7b99df1a-bf6c-4cf1-b58a-346873136715",
      "name": "Basic LLM Chain4",
      "type": "@n8n/n8n-nodes-langchain.chainLlm",
      "position": [
        2440,
        1300
      ]
    },
    {
      "id": "6f6e0667-5164-4b65-a796-1d2112c7c072",
      "name": "Split Out1",
      "type": "n8n-nodes-base.splitOut",
      "position": [
        1680,
        1340
      ]
    },
    {
      "id": "9dfd2145-2427-4131-92d2-99aca620217f",
      "name": "Anthropic Chat Model5",
      "type": "@n8n/n8n-nodes-langchain.lmChatAnthropic",
      "position": [
        2420,
        1460
      ]
    },
    {
      "id": "616fc635-107d-4929-b9d6-4ccd34e42909",
      "name": "Webhook",
      "type": "n8n-nodes-base.webhook",
      "position": [
        2140,
        1400
      ]
    },
    {
      "id": "c863252b-f8b6-4704-be4e-a69d3005a85a",
      "name": "CONNECT ME",
      "type": "n8n-nodes-base.noOp",
      "position": [
        1240,
        -220
      ]
    },
    {
      "id": "90ab4402-cbea-4441-9097-558ec72e5d38",
      "name": "CONNECT ME1",
      "type": "n8n-nodes-base.noOp",
      "position": [
        1280,
        340
      ]
    },
    {
      "id": "1c04650f-8043-496f-aeab-866e85548f9d",
      "name": "CONNECT ME2",
      "type": "n8n-nodes-base.noOp",
      "position": [
        1280,
        1100
      ]
    },
    {
      "id": "4097f12d-eba7-477a-9152-da5eb8c9aa03",
      "name": "Sticky Note3",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        960,
        -300
      ],
      "parameters": {
        "width": 1980,
        "height": 440,
        "content": "# 1 - Naive Chaining\n### PROs:\n- Easy to set up\n- Beginner-friendly\n\n### CONs:\n- Not scalable\n- Hard to maintain long chains\n- SLOOOW!"
      }
    },
    {
      "id": "ce806bc6-a57e-47da-bbba-4698c3956022",
      "name": "Sticky Note4",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        960,
        240
      ],
      "parameters": {
        "width": 2160,
        "height": 660,
        "content": "# 2 - Iterative Agent Processing\n\n### PROs:\n- Scalable\n- All inputs & outputs in a single node\n- Supports Agent memory\n\n### CONs:\n- Still Slow!"
      }
    },
    {
      "id": "49c4507f-de1e-422b-8058-db82668614d3",
      "name": "Sticky Note5",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        960,
        1000
      ],
      "parameters": {
        "width": 1880,
        "height": 600,
        "content": "# 3 - Parallel Processing\n\n### PROs:\n- Scalable\n- All inputs & outputs in a single place\n- FAST!\n\n### CONs:\n- Independent requests\n  (no Agent memory)"
      }
    },
    {
      "id": "c30b8132-9291-4855-89ec-6a98bcee8247",
      "name": "Sticky Note6",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        1420,
        1260
      ],
      "parameters": {
        "width": 400,
        "height": 240,
        "content": "# Array of prompts here"
      }
    },
    {
      "id": "4c1b5816-7393-47f6-8a88-008d8deea119",
      "name": "Initial prompts1",
      "type": "n8n-nodes-base.set",
      "position": [
        1460,
        1340
      ]
    },
    {
      "id": "8248a20f-1f90-42b0-8167-7ddcc90242a2",
      "name": "LLM Chain - Step 1",
      "type": "@n8n/n8n-nodes-langchain.chainLlm",
      "position": [
        1500,
        -220
      ]
    },
    {
      "id": "3788b23b-ccdc-4326-8ce0-1e57934d23bd",
      "name": "LLM Chain - Step 2",
      "type": "@n8n/n8n-nodes-langchain.chainLlm",
      "position": [
        1860,
        -220
      ]
    },
    {
      "id": "89e69a39-bf13-4599-8ddc-a01c4590fb9c",
      "name": "LLM Chain - Step 3",
      "type": "@n8n/n8n-nodes-langchain.chainLlm",
      "position": [
        2220,
        -220
      ]
    },
    {
      "id": "7e395991-9404-490e-8946-0da8f81e7243",
      "name": "LLM Chain - Step 4",
      "type": "@n8n/n8n-nodes-langchain.chainLlm",
      "position": [
        2580,
        -220
      ]
    },
    {
      "id": "efb8d836-8a4a-4a70-baed-4a9b77461aca",
      "name": "All LLM steps here - sequentially",
      "type": "@n8n/n8n-nodes-langchain.agent",
      "position": [
        2640,
        440
      ]
    },
    {
      "id": "926b1705-a24c-4659-bf61-8ed97ade7290",
      "name": "LLM steps - parallel",
      "type": "n8n-nodes-base.httpRequest",
      "position": [
        2140,
        1240
      ]
    },
    {
      "id": "7748574b-1abd-4697-9644-db8bb79fb08d",
      "name": "Merge output with initial prompts",
      "type": "n8n-nodes-base.merge",
      "position": [
        2440,
        1140
      ]
    },
    {
      "id": "b207d83b-ecda-4a9f-af78-cfbb2253c119",
      "name": "Merge output with initial prompts1",
      "type": "n8n-nodes-base.merge",
      "position": [
        3000,
        380
      ]
    }
  ],
  "connections": {
    "Merge": {
      "main": [
        [
          {
            "node": "All LLM steps here - sequentially",
            "type": "main",
            "index": 0
          },
          {
            "node": "Merge output with initial prompts1",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Merge2": {
      "main": [
        [
          {
            "node": "LLM steps - parallel",
            "type": "main",
            "index": 0
          },
          {
            "node": "Merge output with initial prompts",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Reshape": {
      "main": [
        [
          {
            "node": "Split Out",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Webhook": {
      "main": [
        [
          {
            "node": "Basic LLM Chain4",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Markdown": {
      "main": [
        []
      ]
    },
    "Split Out": {
      "main": [
        [
          {
            "node": "Merge",
            "type": "main",
            "index": 1
          }
        ]
      ]
    },
    "CONNECT ME": {
      "main": [
        [
          {
            "node": "LLM Chain - Step 1",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Split Out1": {
      "main": [
        [
          {
            "node": "Merge2",
            "type": "main",
            "index": 1
          }
        ]
      ]
    },
    "CONNECT ME1": {
      "main": [
        [
          {
            "node": "Clean memory",
            "type": "main",
            "index": 0
          },
          {
            "node": "Merge",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "CONNECT ME2": {
      "main": [
        [
          {
            "node": "Initial prompts1",
            "type": "main",
            "index": 0
          },
          {
            "node": "Merge2",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Clean memory": {
      "main": [
        [
          {
            "node": "Initial prompts",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "HTTP Request": {
      "main": [
        [
          {
            "node": "Markdown",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Simple Memory": {
      "ai_memory": [
        [
          {
            "node": "All LLM steps here - sequentially",
            "type": "ai_memory",
            "index": 0
          },
          {
            "node": "Clean memory",
            "type": "ai_memory",
            "index": 0
          }
        ]
      ]
    },
    "Initial prompts": {
      "main": [
        [
          {
            "node": "Reshape",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Initial prompts1": {
      "main": [
        [
          {
            "node": "Split Out1",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "LLM Chain - Step 1": {
      "main": [
        [
          {
            "node": "LLM Chain - Step 2",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "LLM Chain - Step 2": {
      "main": [
        [
          {
            "node": "LLM Chain - Step 3",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "LLM Chain - Step 3": {
      "main": [
        [
          {
            "node": "LLM Chain - Step 4",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Anthropic Chat Model": {
      "ai_languageModel": [
        [
          {
            "node": "LLM Chain - Step 1",
            "type": "ai_languageModel",
            "index": 0
          }
        ]
      ]
    },
    "LLM steps - parallel": {
      "main": [
        [
          {
            "node": "Merge output with initial prompts",
            "type": "main",
            "index": 1
          }
        ]
      ]
    },
    "Anthropic Chat Model1": {
      "ai_languageModel": [
        [
          {
            "node": "LLM Chain - Step 2",
            "type": "ai_languageModel",
            "index": 0
          }
        ]
      ]
    },
    "Anthropic Chat Model2": {
      "ai_languageModel": [
        [
          {
            "node": "LLM Chain - Step 3",
            "type": "ai_languageModel",
            "index": 0
          }
        ]
      ]
    },
    "Anthropic Chat Model3": {
      "ai_languageModel": [
        [
          {
            "node": "LLM Chain - Step 4",
            "type": "ai_languageModel",
            "index": 0
          }
        ]
      ]
    },
    "Anthropic Chat Model4": {
      "ai_languageModel": [
        [
          {
            "node": "All LLM steps here - sequentially",
            "type": "ai_languageModel",
            "index": 0
          }
        ]
      ]
    },
    "Anthropic Chat Model5": {
      "ai_languageModel": [
        [
          {
            "node": "Basic LLM Chain4",
            "type": "ai_languageModel",
            "index": 0
          }
        ]
      ]
    },
    "All LLM steps here - sequentially": {
      "main": [
        [
          {
            "node": "Merge output with initial prompts1",
            "type": "main",
            "index": 1
          }
        ]
      ]
    },
    "When clicking ‘Test workflow’": {
      "main": [
        [
          {
            "node": "HTTP Request",
            "type": "main",
            "index": 0
          }
        ]
      ]
    }
  }
}