lorum 0.1.2-alpha.1

Unified MCP configuration manager for AI coding tools
Documentation
# lorum configuration example
# Place at ~/.config/lorum/config.yaml or in your project as .lorum/config.yaml

mcp:
  servers:
    fetch:
      command: npx
      args:
        - -y
        - "@modelcontextprotocol/server-fetch"

    sequential-thinking:
      command: npx
      args:
        - -y
        - "@modelcontextprotocol/server-sequential-thinking"

    context7:
      command: npx
      args:
        - -y
        - "@upstash/context7-mcp@latest"

    memory:
      command: npx
      args:
        - -y
        - "@modelcontextprotocol/server-memory"

    github:
      command: npx
      args:
        - -y
        - "@modelcontextprotocol/server-github"
      env:
        GITHUB_PERSONAL_ACCESS_TOKEN: "${GITHUB_TOKEN}"