Add Memory Bank, Puppeteer, Sequential Thinking, and Docker MCP servers
New MCP servers added to the gateway stack: - memory-bank-mcp (port 8700): Persistent key-value memory storage with tags, categories, and search - puppeteer-mcp (port 8800): Headless browser automation via Pyppeteer (navigate, screenshot, click, JS eval, PDF gen) - sequential-thinking-mcp (port 8900): Structured step-by-step reasoning with branching hypotheses and synthesis - docker-mcp (port 9000): Docker container/image/network/volume management via Docker socket All servers follow the existing Python/FastMCP pattern with streamable-http transport. docker-compose.yml updated with service definitions and gateway backend routes.
This commit is contained in:
parent
20e6b4cf96
commit
39fff1e44a
17 changed files with 1864 additions and 0 deletions
|
|
@ -26,6 +26,11 @@ services:
|
||||||
- MCP_BACKEND_WAVE=http://mcp-wave:8300/mcp
|
- MCP_BACKEND_WAVE=http://mcp-wave:8300/mcp
|
||||||
- MCP_BACKEND_LINKEDIN=http://mcp-linkedin:8500/mcp
|
- MCP_BACKEND_LINKEDIN=http://mcp-linkedin:8500/mcp
|
||||||
- MCP_BACKEND_FORGEJO=http://mcp-forgejo:8400/mcp
|
- MCP_BACKEND_FORGEJO=http://mcp-forgejo:8400/mcp
|
||||||
|
- MCP_BACKEND_SSH=http://mcp-ssh:8600/mcp
|
||||||
|
- MCP_BACKEND_MEMORY_BANK=http://mcp-memory-bank:8700/mcp
|
||||||
|
- MCP_BACKEND_PUPPETEER=http://mcp-puppeteer:8800/mcp
|
||||||
|
- MCP_BACKEND_SEQUENTIAL_THINKING=http://mcp-sequential-thinking:8900/mcp
|
||||||
|
- MCP_BACKEND_DOCKER=http://mcp-docker:9000/mcp
|
||||||
- GATEWAY_STATIC_API_KEY=${GATEWAY_STATIC_API_KEY}
|
- GATEWAY_STATIC_API_KEY=${GATEWAY_STATIC_API_KEY}
|
||||||
depends_on:
|
depends_on:
|
||||||
- erpnext-mcp
|
- erpnext-mcp
|
||||||
|
|
@ -34,6 +39,11 @@ services:
|
||||||
- wave-mcp
|
- wave-mcp
|
||||||
- linkedin-mcp
|
- linkedin-mcp
|
||||||
- forgejo-mcp
|
- forgejo-mcp
|
||||||
|
- ssh-mcp
|
||||||
|
- memory-bank-mcp
|
||||||
|
- puppeteer-mcp
|
||||||
|
- sequential-thinking-mcp
|
||||||
|
- docker-mcp
|
||||||
networks: [mcpnet]
|
networks: [mcpnet]
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:4444/health', timeout=5)"]
|
test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:4444/health', timeout=5)"]
|
||||||
|
|
@ -156,6 +166,103 @@ services:
|
||||||
start_period: 15s
|
start_period: 15s
|
||||||
retries: 3
|
retries: 3
|
||||||
|
|
||||||
|
ssh-mcp:
|
||||||
|
build:
|
||||||
|
context: ./ssh-mcp
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
container_name: mcp-ssh
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
- PORT=8600
|
||||||
|
- SSH_HOST=${SSH_HOST}
|
||||||
|
- SSH_PORT=${SSH_PORT:-22}
|
||||||
|
- SSH_USER=${SSH_USER:-root}
|
||||||
|
- SSH_PASSWORD=${SSH_PASSWORD:-}
|
||||||
|
- SSH_KEY_PATH=${SSH_KEY_PATH:-}
|
||||||
|
- SSH_PASSPHRASE=${SSH_PASSPHRASE:-}
|
||||||
|
networks: [mcpnet]
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8600/mcp', timeout=5)"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 5s
|
||||||
|
start_period: 15s
|
||||||
|
retries: 3
|
||||||
|
|
||||||
|
memory-bank-mcp:
|
||||||
|
build:
|
||||||
|
context: ./memory-bank-mcp
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
container_name: mcp-memory-bank
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
- PORT=8700
|
||||||
|
- MEMORY_DIR=/data/memories
|
||||||
|
- MAX_MEMORIES=${MAX_MEMORIES:-10000}
|
||||||
|
volumes:
|
||||||
|
- memory-bank-data:/data
|
||||||
|
networks: [mcpnet]
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8700/mcp', timeout=5)"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 5s
|
||||||
|
start_period: 15s
|
||||||
|
retries: 3
|
||||||
|
|
||||||
|
puppeteer-mcp:
|
||||||
|
build:
|
||||||
|
context: ./puppeteer-mcp
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
container_name: mcp-puppeteer
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
- PORT=8800
|
||||||
|
- VIEWPORT_WIDTH=${PUPPETEER_VIEWPORT_WIDTH:-1280}
|
||||||
|
- VIEWPORT_HEIGHT=${PUPPETEER_VIEWPORT_HEIGHT:-720}
|
||||||
|
- PAGE_TIMEOUT=${PUPPETEER_PAGE_TIMEOUT:-30000}
|
||||||
|
networks: [mcpnet]
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8800/mcp', timeout=5)"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 5s
|
||||||
|
start_period: 20s
|
||||||
|
retries: 3
|
||||||
|
|
||||||
|
sequential-thinking-mcp:
|
||||||
|
build:
|
||||||
|
context: ./sequential-thinking-mcp
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
container_name: mcp-sequential-thinking
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
- PORT=8900
|
||||||
|
networks: [mcpnet]
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8900/mcp', timeout=5)"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 5s
|
||||||
|
start_period: 15s
|
||||||
|
retries: 3
|
||||||
|
|
||||||
|
docker-mcp:
|
||||||
|
build:
|
||||||
|
context: ./docker-mcp
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
container_name: mcp-docker
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
- PORT=9000
|
||||||
|
- DOCKER_HOST=${DOCKER_HOST:-unix:///var/run/docker.sock}
|
||||||
|
volumes:
|
||||||
|
- /var/run/docker.sock:/var/run/docker.sock
|
||||||
|
networks: [mcpnet]
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:9000/mcp', timeout=5)"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 5s
|
||||||
|
start_period: 15s
|
||||||
|
retries: 3
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
gateway-data:
|
gateway-data:
|
||||||
linkedin-data:
|
linkedin-data:
|
||||||
|
memory-bank-data:
|
||||||
|
|
|
||||||
18
docker-mcp/Dockerfile
Executable file
18
docker-mcp/Dockerfile
Executable file
|
|
@ -0,0 +1,18 @@
|
||||||
|
FROM python:3.12-slim-bookworm
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
COPY requirements.txt .
|
||||||
|
RUN pip install --no-cache-dir -r requirements.txt
|
||||||
|
|
||||||
|
COPY docker_mcp.py .
|
||||||
|
COPY entrypoint.py .
|
||||||
|
|
||||||
|
ENV PORT=9000
|
||||||
|
|
||||||
|
EXPOSE 9000
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=5s --start-period=15s \
|
||||||
|
CMD python3 -c "import urllib.request; urllib.request.urlopen('http://localhost:9000/mcp', timeout=5)"
|
||||||
|
|
||||||
|
CMD ["python3", "entrypoint.py"]
|
||||||
532
docker-mcp/docker_mcp.py
Executable file
532
docker-mcp/docker_mcp.py
Executable file
|
|
@ -0,0 +1,532 @@
|
||||||
|
"""
|
||||||
|
Docker MCP Server
|
||||||
|
=================
|
||||||
|
MCP server providing Docker container and image management capabilities.
|
||||||
|
Supports listing, inspecting, starting, stopping, removing containers,
|
||||||
|
managing images, viewing logs, executing commands, and managing networks/volumes.
|
||||||
|
Connects to the Docker daemon via socket or TCP.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
from typing import Optional, List, Dict, Any
|
||||||
|
|
||||||
|
import docker
|
||||||
|
from docker.errors import DockerException, NotFound, APIError
|
||||||
|
from mcp.server.fastmcp import FastMCP
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Configuration
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
DOCKER_HOST = os.environ.get("DOCKER_HOST", "unix:///var/run/docker.sock")
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# MCP Server
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
mcp = FastMCP("docker_mcp")
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Docker client
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def _client() -> docker.DockerClient:
    """Return a Docker client bound to the configured daemon endpoint.

    A new client is built per call with a 30s request timeout.
    """
    conn = docker.DockerClient(base_url=DOCKER_HOST, timeout=30)
    return conn
|
||||||
|
|
||||||
|
|
||||||
|
def _safe(func):
    """Invoke *func*, converting known Docker exceptions into error dicts.

    Returns *func*'s result on success; on NotFound/APIError/DockerException
    returns {"error": ...} instead of raising.
    """
    try:
        return func()
    except NotFound as e:
        msg = f"Not found: {str(e)}"
    except APIError as e:
        msg = f"Docker API error: {str(e)}"
    except DockerException as e:
        msg = f"Docker error: {str(e)}"
    return {"error": msg}
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Container tools
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def list_containers(
    all: bool = True,
    filters: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """
    List Docker containers.

    Args:
        all: Show all containers (including stopped). Default True.
        filters: Optional filter dict (e.g., {"status": "running", "name": "my-app"})
    """
    def _summary(c) -> Dict[str, Any]:
        # Prefer the first repo tag for readability; fall back to a short image ID.
        image = str(c.image.tags[0]) if c.image.tags else str(c.image.id[:12])
        return {
            "id": c.short_id,
            "name": c.name,
            "image": image,
            "status": c.status,
            "state": c.attrs.get("State", {}).get("Status"),
            "created": c.attrs.get("Created"),
            "ports": c.ports,
        }

    found = _client().containers.list(all=all, filters=filters or {})
    result = [_summary(c) for c in found]
    return {"containers": result, "total": len(result)}
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def inspect_container(container_id: str) -> Dict[str, Any]:
    """
    Get detailed information about a container.

    Args:
        container_id: Container ID or name
    """
    client = _client()
    try:
        target = client.containers.get(container_id)
    except NotFound:
        return {"error": f"Container '{container_id}' not found"}

    attrs = target.attrs
    cfg = attrs.get("Config", {})
    image = str(target.image.tags[0]) if target.image.tags else str(target.image.id[:12])
    return {
        "id": target.short_id,
        "name": target.name,
        "image": image,
        "status": target.status,
        "state": attrs.get("State", {}),
        "config": {
            "env": cfg.get("Env", []),
            "cmd": cfg.get("Cmd"),
            "entrypoint": cfg.get("Entrypoint"),
            "working_dir": cfg.get("WorkingDir"),
        },
        "network": attrs.get("NetworkSettings", {}).get("Networks", {}),
        "mounts": attrs.get("Mounts", []),
        "ports": target.ports,
        "created": attrs.get("Created"),
        "restart_count": attrs.get("RestartCount", 0),
    }
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def container_logs(
    container_id: str,
    tail: int = 100,
    since: Optional[str] = None,
    timestamps: bool = False,
) -> Dict[str, Any]:
    """
    Get logs from a container.

    Args:
        container_id: Container ID or name
        tail: Number of lines from the end (default 100)
        since: Show logs since timestamp (e.g., '2024-01-01T00:00:00')
        timestamps: Include timestamps in log output
    """
    client = _client()
    try:
        target = client.containers.get(container_id)
    except NotFound:
        return {"error": f"Container '{container_id}' not found"}

    options: Dict[str, Any] = dict(
        tail=tail,
        timestamps=timestamps,
        stdout=True,
        stderr=True,
    )
    if since:
        options["since"] = since

    raw = target.logs(**options)
    return {
        "container": container_id,
        "lines": tail,
        "logs": raw.decode("utf-8", errors="replace"),
    }
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def start_container(container_id: str) -> Dict[str, Any]:
    """
    Start a stopped container.

    Args:
        container_id: Container ID or name
    """
    client = _client()
    try:
        target = client.containers.get(container_id)
        target.start()
        target.reload()  # refresh cached attrs so `status` reflects the new state
        return {"status": "started", "container": target.name, "state": target.status}
    except NotFound:
        return {"error": f"Container '{container_id}' not found"}
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def stop_container(
    container_id: str,
    timeout: int = 10,
) -> Dict[str, Any]:
    """
    Stop a running container.

    Args:
        container_id: Container ID or name
        timeout: Seconds to wait before killing (default 10)
    """
    client = _client()
    try:
        target = client.containers.get(container_id)
        target.stop(timeout=timeout)
        target.reload()  # refresh cached attrs so `status` reflects the new state
        return {"status": "stopped", "container": target.name, "state": target.status}
    except NotFound:
        return {"error": f"Container '{container_id}' not found"}
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def restart_container(
    container_id: str,
    timeout: int = 10,
) -> Dict[str, Any]:
    """
    Restart a container.

    Args:
        container_id: Container ID or name
        timeout: Seconds to wait before killing (default 10)
    """
    client = _client()
    try:
        target = client.containers.get(container_id)
        target.restart(timeout=timeout)
        target.reload()  # refresh cached attrs so `status` reflects the new state
        return {"status": "restarted", "container": target.name, "state": target.status}
    except NotFound:
        return {"error": f"Container '{container_id}' not found"}
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def remove_container(
    container_id: str,
    force: bool = False,
    v: bool = False,
) -> Dict[str, Any]:
    """
    Remove a container.

    Args:
        container_id: Container ID or name
        force: Force remove even if running
        v: Remove associated volumes
    """
    client = _client()
    try:
        target = client.containers.get(container_id)
        removed_name = target.name  # captured before removal invalidates the object
        target.remove(force=force, v=v)
        return {"status": "removed", "container": removed_name}
    except NotFound:
        return {"error": f"Container '{container_id}' not found"}
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def exec_in_container(
    container_id: str,
    command: str,
    workdir: Optional[str] = None,
) -> Dict[str, Any]:
    """
    Execute a command inside a running container.

    The command is run through ``/bin/sh -c`` so shell features (pipes,
    redirection, variable expansion) behave as expected.

    Args:
        container_id: Container ID or name
        command: Command to execute (shell string)
        workdir: Optional working directory inside the container

    Returns:
        Dict with the command's exit_code and combined stdout/stderr output,
        or an error dict if the container is missing or the exec fails.
    """
    client = _client()
    try:
        c = client.containers.get(container_id)
        # Wrap in `sh -c`: docker-py merely shlex-splits a bare string, so
        # pipes/redirects in `command` would otherwise be passed as literal
        # arguments to the first word instead of interpreted by a shell.
        kwargs: Dict[str, Any] = {
            "cmd": ["/bin/sh", "-c", command],
            "stdout": True,
            "stderr": True,
        }
        if workdir:
            kwargs["workdir"] = workdir

        exit_code, output = c.exec_run(**kwargs)
        return {
            "container": container_id,
            "command": command,
            "exit_code": exit_code,
            "output": output.decode("utf-8", errors="replace"),
        }
    except NotFound:
        return {"error": f"Container '{container_id}' not found"}
    except APIError as e:
        # e.g. exec against a stopped container (HTTP 409) — report instead
        # of letting the exception escape the tool call.
        return {"error": f"Exec failed in '{container_id}': {str(e)}"}
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def container_stats(container_id: str) -> Dict[str, Any]:
    """
    Get resource usage stats for a container.

    Args:
        container_id: Container ID or name

    Returns:
        CPU percentage, memory usage/limit/percentage, aggregate network
        rx/tx bytes across all attached networks, and current PID count.
        Returns an error dict on missing containers or unparsable stats.
    """
    client = _client()
    try:
        c = client.containers.get(container_id)
        stats = c.stats(stream=False)

        # CPU %: container CPU-time delta over total system CPU-time delta
        # since the previous sample, scaled by the number of CPUs.
        cpu_delta = stats["cpu_stats"]["cpu_usage"]["total_usage"] - \
            stats["precpu_stats"]["cpu_usage"]["total_usage"]
        system_delta = stats["cpu_stats"]["system_cpu_usage"] - \
            stats["precpu_stats"]["system_cpu_usage"]
        # cgroup v2 daemons omit `percpu_usage`, which previously made this
        # default to 1 CPU and under-report usage on multi-core hosts.
        # Prefer `online_cpus` (reported by modern daemons), then fall back.
        num_cpus = stats["cpu_stats"].get("online_cpus") or len(
            stats["cpu_stats"]["cpu_usage"].get("percpu_usage", [1])
        )
        cpu_percent = (cpu_delta / system_delta) * num_cpus * 100.0 if system_delta > 0 else 0.0

        # Memory: limit defaults to 1 so the percentage math cannot divide by zero.
        mem_usage = stats["memory_stats"].get("usage", 0)
        mem_limit = stats["memory_stats"].get("limit", 1)
        mem_percent = (mem_usage / mem_limit) * 100.0

        return {
            "container": container_id,
            "cpu_percent": round(cpu_percent, 2),
            "memory_usage_mb": round(mem_usage / (1024 * 1024), 2),
            "memory_limit_mb": round(mem_limit / (1024 * 1024), 2),
            "memory_percent": round(mem_percent, 2),
            "network_rx_bytes": sum(
                v.get("rx_bytes", 0) for v in stats.get("networks", {}).values()
            ),
            "network_tx_bytes": sum(
                v.get("tx_bytes", 0) for v in stats.get("networks", {}).values()
            ),
            "pids": stats.get("pids_stats", {}).get("current", 0),
        }
    except NotFound:
        return {"error": f"Container '{container_id}' not found"}
    except (KeyError, ZeroDivisionError) as e:
        return {"error": f"Failed to parse stats: {str(e)}"}
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Image tools
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def list_images(
    name: Optional[str] = None,
    all: bool = False,
) -> Dict[str, Any]:
    """
    List Docker images.

    Args:
        name: Optional filter by image name
        all: Show all images including intermediate layers
    """
    client = _client()
    result = [
        {
            "id": img.short_id,
            "tags": img.tags,
            "size_mb": round(img.attrs.get("Size", 0) / (1024 * 1024), 2),
            "created": img.attrs.get("Created"),
        }
        for img in client.images.list(name=name, all=all)
    ]
    return {"images": result, "total": len(result)}
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def pull_image(
    image: str,
    tag: str = "latest",
) -> Dict[str, Any]:
    """
    Pull a Docker image from a registry.

    Args:
        image: Image name (e.g., 'nginx', 'python')
        tag: Image tag (default: 'latest')
    """
    client = _client()
    try:
        pulled = client.images.pull(image, tag=tag)
    except APIError as e:
        return {"error": f"Failed to pull {image}:{tag}: {str(e)}"}
    return {
        "status": "pulled",
        "image": f"{image}:{tag}",
        "id": pulled.short_id,
        "size_mb": round(pulled.attrs.get("Size", 0) / (1024 * 1024), 2),
    }
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def remove_image(
    image: str,
    force: bool = False,
) -> Dict[str, Any]:
    """
    Remove a Docker image.

    Args:
        image: Image ID or name:tag
        force: Force removal

    Returns:
        A status dict, or an error dict if the image is missing or still
        in use by a container (the daemon rejects with HTTP 409).
    """
    client = _client()
    try:
        client.images.remove(image, force=force)
        return {"status": "removed", "image": image}
    except NotFound:
        return {"error": f"Image '{image}' not found"}
    except APIError as e:
        # e.g. image referenced by a container and force=False — previously
        # this exception propagated out of the tool instead of being reported.
        return {"error": f"Failed to remove {image}: {str(e)}"}
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# System tools
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def docker_system_info() -> Dict[str, Any]:
    """Get Docker system-wide information (version, OS, resources, object counts)."""
    info = _client().info()
    # Field renames from the Engine /info payload; memory is converted to GB.
    summary = {
        "docker_version": info.get("ServerVersion"),
        "os": info.get("OperatingSystem"),
        "arch": info.get("Architecture"),
        "cpus": info.get("NCPU"),
        "memory_gb": round(info.get("MemTotal", 0) / (1024**3), 2),
        "containers_running": info.get("ContainersRunning"),
        "containers_stopped": info.get("ContainersStopped"),
        "containers_paused": info.get("ContainersPaused"),
        "images": info.get("Images"),
        "storage_driver": info.get("Driver"),
    }
    return summary
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def docker_disk_usage() -> Dict[str, Any]:
    """Get Docker disk usage summary.

    Returns:
        Sizes in MB for container writable layers, images, and volumes,
        plus object counts, derived from the Engine's `df` endpoint.
    """
    client = _client()
    df = client.df()

    # The Engine may return null for these top-level lists when a category
    # is empty, so normalize to [] before iterating.
    container_entries = df.get("Containers") or []
    image_entries = df.get("Images") or []
    volume_entries = df.get("Volumes") or []

    containers_size = sum(c.get("SizeRw", 0) or 0 for c in container_entries)
    images_size = sum(i.get("Size", 0) for i in image_entries)
    # "UsageData" can be null when the daemon hasn't computed volume usage —
    # calling .get on it directly raised AttributeError before this guard.
    volumes_size = sum(
        (v.get("UsageData") or {}).get("Size", 0) for v in volume_entries
    )

    return {
        "containers_size_mb": round(containers_size / (1024 * 1024), 2),
        "images_size_mb": round(images_size / (1024 * 1024), 2),
        "volumes_size_mb": round(volumes_size / (1024 * 1024), 2),
        "total_mb": round((containers_size + images_size + volumes_size) / (1024 * 1024), 2),
        "images_count": len(image_entries),
        "containers_count": len(container_entries),
        "volumes_count": len(volume_entries),
    }
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Network tools
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def list_networks() -> Dict[str, Any]:
    """List Docker networks with id, name, driver, scope, and attached-container count."""
    nets = _client().networks.list()
    result = [
        {
            "id": net.short_id,
            "name": net.name,
            "driver": net.attrs.get("Driver"),
            "scope": net.attrs.get("Scope"),
            "containers": len(net.attrs.get("Containers", {})),
        }
        for net in nets
    ]
    return {"networks": result, "total": len(result)}
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Volume tools
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def list_volumes() -> Dict[str, Any]:
    """List Docker volumes with name, driver, mountpoint, and creation time."""
    vols = _client().volumes.list()
    result = [
        {
            "name": vol.name,
            "driver": vol.attrs.get("Driver"),
            "mountpoint": vol.attrs.get("Mountpoint"),
            "created": vol.attrs.get("CreatedAt"),
        }
        for vol in vols
    ]
    return {"volumes": result, "total": len(result)}
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def prune_system(
    containers: bool = True,
    images: bool = True,
    volumes: bool = False,
    networks: bool = True,
) -> Dict[str, Any]:
    """
    Prune unused Docker resources.

    Args:
        containers: Prune stopped containers
        images: Prune dangling images
        volumes: Prune unused volumes (CAUTION: data loss)
        networks: Prune unused networks
    """
    client = _client()
    results: Dict[str, Any] = {}

    def _mb(num_bytes: int) -> float:
        # Space reclaimed is reported by the daemon in bytes.
        return round(num_bytes / (1024 * 1024), 2)

    if containers:
        report = client.containers.prune()
        results["containers_deleted"] = len(report.get("ContainersDeleted") or [])
        results["containers_space_mb"] = _mb(report.get("SpaceReclaimed", 0))

    if images:
        report = client.images.prune()
        results["images_deleted"] = len(report.get("ImagesDeleted") or [])
        results["images_space_mb"] = _mb(report.get("SpaceReclaimed", 0))

    if volumes:
        report = client.volumes.prune()
        results["volumes_deleted"] = len(report.get("VolumesDeleted") or [])
        results["volumes_space_mb"] = _mb(report.get("SpaceReclaimed", 0))

    if networks:
        report = client.networks.prune()
        results["networks_deleted"] = len(report.get("NetworksDeleted") or [])

    return results
|
||||||
12
docker-mcp/entrypoint.py
Executable file
12
docker-mcp/entrypoint.py
Executable file
|
|
@ -0,0 +1,12 @@
|
||||||
|
import os
from docker_mcp import mcp
from mcp.server.fastmcp.server import TransportSecuritySettings

# Bind on all interfaces so other containers on the compose network can
# reach the server; the listen port comes from the PORT env var (default 9000).
mcp.settings.host = "0.0.0.0"
mcp.settings.port = int(os.environ.get("PORT", "9000"))
# NOTE(review): DNS-rebinding protection is turned off — presumably because
# requests arrive via the internal gateway using a service hostname rather
# than a browser origin; confirm before exposing this port externally.
mcp.settings.transport_security = TransportSecuritySettings(
    enable_dns_rebinding_protection=False,
)

if __name__ == "__main__":
    # Streamable-HTTP transport matches the gateway's backend route config.
    mcp.run(transport="streamable-http")
|
||||||
6
docker-mcp/requirements.txt
Executable file
6
docker-mcp/requirements.txt
Executable file
|
|
@ -0,0 +1,6 @@
|
||||||
|
mcp[cli]>=1.0.0
|
||||||
|
httpx>=0.27.0
|
||||||
|
pydantic>=2.0.0
|
||||||
|
uvicorn>=0.30.0
|
||||||
|
starlette>=0.38.0
|
||||||
|
docker>=7.0.0
|
||||||
18
memory-bank-mcp/Dockerfile
Executable file
18
memory-bank-mcp/Dockerfile
Executable file
|
|
@ -0,0 +1,18 @@
|
||||||
|
FROM python:3.12-slim-bookworm
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
COPY requirements.txt .
|
||||||
|
RUN pip install --no-cache-dir -r requirements.txt
|
||||||
|
|
||||||
|
COPY memory_bank_mcp.py .
|
||||||
|
COPY entrypoint.py .
|
||||||
|
|
||||||
|
ENV PORT=8700
|
||||||
|
|
||||||
|
EXPOSE 8700
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=5s --start-period=15s \
|
||||||
|
CMD python3 -c "import urllib.request; urllib.request.urlopen('http://localhost:8700/mcp', timeout=5)"
|
||||||
|
|
||||||
|
CMD ["python3", "entrypoint.py"]
|
||||||
12
memory-bank-mcp/entrypoint.py
Executable file
12
memory-bank-mcp/entrypoint.py
Executable file
|
|
@ -0,0 +1,12 @@
|
||||||
|
import os
from memory_bank_mcp import mcp
from mcp.server.fastmcp.server import TransportSecuritySettings

# Bind on all interfaces so other containers on the compose network can
# reach the server; the listen port comes from the PORT env var (default 8700).
mcp.settings.host = "0.0.0.0"
mcp.settings.port = int(os.environ.get("PORT", "8700"))
# NOTE(review): DNS-rebinding protection is turned off — presumably because
# requests arrive via the internal gateway using a service hostname rather
# than a browser origin; confirm before exposing this port externally.
mcp.settings.transport_security = TransportSecuritySettings(
    enable_dns_rebinding_protection=False,
)

if __name__ == "__main__":
    # Streamable-HTTP transport matches the gateway's backend route config.
    mcp.run(transport="streamable-http")
|
||||||
353
memory-bank-mcp/memory_bank_mcp.py
Executable file
353
memory-bank-mcp/memory_bank_mcp.py
Executable file
|
|
@ -0,0 +1,353 @@
|
||||||
|
"""
|
||||||
|
Memory Bank MCP Server
|
||||||
|
======================
|
||||||
|
MCP server providing persistent memory storage for LLM conversations.
|
||||||
|
Stores and retrieves key-value memories with metadata, tags, and
|
||||||
|
semantic search capabilities. Backed by a local JSON file store.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
import hashlib
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional, List, Any, Dict
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
|
from mcp.server.fastmcp import FastMCP
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Configuration
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
MEMORY_DIR = os.environ.get("MEMORY_DIR", "/data/memories")
|
||||||
|
MAX_MEMORIES = int(os.environ.get("MAX_MEMORIES", "10000"))
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# MCP Server
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
mcp = FastMCP("memory_bank_mcp")
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Storage helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_dir():
    """Create the memory directory (and any missing parents) if absent."""
    target = Path(MEMORY_DIR)
    target.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
|
||||||
|
def _index_path() -> str:
    """Absolute path of the JSON index file inside the memory directory."""
    return os.path.join(MEMORY_DIR, "_index.json")
|
||||||
|
|
||||||
|
|
||||||
|
def _load_index() -> Dict[str, Any]:
    """Load the memory index from disk, returning an empty skeleton if absent."""
    _ensure_dir()
    path = _index_path()
    if not os.path.exists(path):
        # Fresh store: no memories recorded and no tag buckets yet.
        return {"memories": {}, "tags": {}}
    with open(path, "r") as fh:
        return json.load(fh)
|
||||||
|
|
||||||
|
|
||||||
|
def _save_index(index: Dict[str, Any]):
    """Persist the memory index to disk atomically.

    Writes to a sibling temp file and renames it over ``_index.json``.
    ``os.replace`` is an atomic rename on POSIX, so a crash mid-write can
    no longer leave a truncated/corrupt index that breaks every later load.

    Args:
        index: The in-memory index structure ({"memories": ..., "tags": ...}).
    """
    _ensure_dir()
    final_path = _index_path()
    tmp_path = final_path + ".tmp"
    with open(tmp_path, "w") as f:
        json.dump(index, f, indent=2)
    os.replace(tmp_path, final_path)
|
||||||
|
|
||||||
|
|
||||||
|
def _memory_path(memory_id: str) -> str:
    """Path of the per-memory JSON file for *memory_id*."""
    filename = f"{memory_id}.json"
    return os.path.join(MEMORY_DIR, filename)
|
||||||
|
|
||||||
|
|
||||||
|
def _generate_id(content: str) -> str:
|
||||||
|
"""Generate a deterministic ID from content."""
|
||||||
|
return hashlib.sha256(content.encode()).hexdigest()[:16]
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Tools
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def store_memory(
    key: str,
    content: str,
    tags: Optional[List[str]] = None,
    category: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """
    Store a memory with a key, content, optional tags, category, and metadata.
    If a memory with the same key exists, it will be updated.

    Args:
        key: Unique key/name for this memory
        content: The content to remember
        tags: Optional list of tags for categorization
        category: Optional category (e.g., 'user_preference', 'fact', 'context')
        metadata: Optional additional metadata dict
    """
    index = _load_index()
    now = datetime.now(timezone.utc).isoformat()

    memory = {
        "key": key,
        "content": content,
        "tags": tags or [],
        "category": category or "general",
        "metadata": metadata or {},
        "created_at": now,
        "updated_at": now,
        "access_count": 0,
    }

    # On update, preserve creation time and access count, and remember
    # the previous tags so stale tag-index entries can be pruned below.
    old_tags: List[str] = []
    if key in index["memories"]:
        old_tags = index["memories"][key].get("tags", [])
        existing = _load_memory_file(key)
        if existing:
            memory["created_at"] = existing.get("created_at", now)
            memory["access_count"] = existing.get("access_count", 0)

    # Save memory file
    _ensure_dir()
    with open(_memory_path(key), "w") as f:
        json.dump(memory, f, indent=2)

    # Update index
    index["memories"][key] = {
        "category": memory["category"],
        "tags": memory["tags"],
        "updated_at": now,
    }

    # BUG FIX: tags dropped by an update were previously never removed
    # from the tag index, so tag searches kept matching this key forever.
    for tag in old_tags:
        if tag not in memory["tags"] and tag in index["tags"]:
            if key in index["tags"][tag]:
                index["tags"][tag].remove(key)
            if not index["tags"][tag]:
                del index["tags"][tag]

    # Update tag index with the current tags.
    for tag in memory["tags"]:
        index["tags"].setdefault(tag, [])
        if key not in index["tags"][tag]:
            index["tags"][tag].append(key)

    _save_index(index)

    return {"status": "stored", "key": key, "updated_at": now}
|
||||||
|
|
||||||
|
def _load_memory_file(key: str) -> Optional[Dict[str, Any]]:
    """Read and return the stored memory for *key*, or None if absent."""
    path = _memory_path(key)
    if not os.path.exists(path):
        return None
    with open(path, "r") as f:
        return json.load(f)
|
||||||
|
|
||||||
|
@mcp.tool()
async def recall_memory(key: str) -> Dict[str, Any]:
    """
    Retrieve a specific memory by its key.

    Args:
        key: The key of the memory to retrieve
    """
    memory = _load_memory_file(key)
    if not memory:
        return {"error": f"Memory '{key}' not found"}

    # Bump usage bookkeeping and write it straight back to the file.
    memory["access_count"] = memory.get("access_count", 0) + 1
    memory["last_accessed"] = datetime.now(timezone.utc).isoformat()
    with open(_memory_path(key), "w") as f:
        json.dump(memory, f, indent=2)

    return memory
|
||||||
|
|
||||||
|
@mcp.tool()
async def search_memories(
    query: Optional[str] = None,
    tags: Optional[List[str]] = None,
    category: Optional[str] = None,
    limit: int = 20,
) -> Dict[str, Any]:
    """
    Search memories by text query, tags, or category.

    Args:
        query: Optional text to search for in memory content and keys
        tags: Optional list of tags to filter by (memories must have ALL tags)
        category: Optional category to filter by
        limit: Maximum results to return (default 20)
    """
    index = _load_index()

    # Start from every key, then intersect / filter the candidate set.
    candidates = set(index["memories"])
    if tags:
        for tag in tags:
            candidates &= set(index["tags"].get(tag, []))
    if category:
        candidates = {
            key for key in candidates
            if index["memories"].get(key, {}).get("category") == category
        }

    needle = query.lower() if query else None
    matches = []
    for key in candidates:
        memory = _load_memory_file(key)
        if not memory:
            continue

        if needle is not None:
            in_content = needle in memory.get("content", "").lower()
            in_key = needle in memory.get("key", "").lower()
            in_tags = any(needle in t.lower() for t in memory.get("tags", []))
            if not (in_content or in_key or in_tags):
                continue

        full_content = memory.get("content", "")
        # Trim previews to 200 chars so responses stay compact.
        preview = memory["content"][:200] + ("..." if len(full_content) > 200 else "")
        matches.append({
            "key": memory["key"],
            "content": preview,
            "category": memory.get("category"),
            "tags": memory.get("tags", []),
            "updated_at": memory.get("updated_at"),
            "access_count": memory.get("access_count", 0),
        })

    # Newest first.
    matches.sort(key=lambda m: m.get("updated_at", ""), reverse=True)

    return {"total": len(matches), "results": matches[:limit]}
|
||||||
|
|
||||||
|
@mcp.tool()
async def delete_memory(key: str) -> Dict[str, Any]:
    """
    Delete a specific memory by key.

    Args:
        key: The key of the memory to delete
    """
    index = _load_index()
    if key not in index["memories"]:
        return {"error": f"Memory '{key}' not found"}

    # Drop the key from every tag bucket it appears in, pruning buckets
    # that become empty.
    entry = index["memories"][key]
    for tag in entry.get("tags", []):
        bucket = index["tags"].get(tag)
        if bucket is not None and key in bucket:
            bucket.remove(key)
            if not bucket:
                del index["tags"][tag]

    del index["memories"][key]
    _save_index(index)

    # Best-effort removal of the backing file.
    path = _memory_path(key)
    if os.path.exists(path):
        os.remove(path)

    return {"status": "deleted", "key": key}
|
||||||
|
|
||||||
|
@mcp.tool()
async def list_memories(
    category: Optional[str] = None,
    limit: int = 50,
    offset: int = 0,
) -> Dict[str, Any]:
    """
    List all stored memories with optional category filter.

    Args:
        category: Optional category to filter by
        limit: Maximum results (default 50)
        offset: Skip first N results (default 0)
    """
    index = _load_index()

    summaries = [
        {
            "key": key,
            "category": info.get("category"),
            "tags": info.get("tags", []),
            "updated_at": info.get("updated_at"),
        }
        for key, info in index["memories"].items()
        if not category or info.get("category") == category
    ]

    # Newest first, then apply offset/limit pagination.
    summaries.sort(key=lambda m: m.get("updated_at", ""), reverse=True)

    return {
        "total": len(summaries),
        "offset": offset,
        "limit": limit,
        "memories": summaries[offset:offset + limit],
    }
|
||||||
|
|
||||||
|
@mcp.tool()
async def get_memory_stats() -> Dict[str, Any]:
    """Get statistics about the memory bank."""
    index = _load_index()

    # Tally memory count per category.
    categories: Dict[str, int] = {}
    for info in index["memories"].values():
        cat = info.get("category", "general")
        categories.setdefault(cat, 0)
        categories[cat] += 1

    return {
        "total_memories": len(index["memories"]),
        "total_tags": len(index["tags"]),
        "categories": categories,
        "max_memories": MAX_MEMORIES,
    }
|
||||||
|
|
||||||
|
@mcp.tool()
async def bulk_store_memories(
    memories: List[Dict[str, Any]],
) -> Dict[str, Any]:
    """
    Store multiple memories at once.

    Entries missing the required 'key' or 'content' fields are reported
    as per-item errors instead of aborting the whole batch with an
    unhandled KeyError (which previously stored earlier items and
    silently dropped the rest).

    Args:
        memories: List of memory objects, each with 'key', 'content', and optional 'tags', 'category', 'metadata'
    """
    results = []
    for position, mem in enumerate(memories):
        if "key" not in mem or "content" not in mem:
            results.append({
                "error": "missing required field 'key' or 'content'",
                "index": position,
            })
            continue
        result = await store_memory(
            key=mem["key"],
            content=mem["content"],
            tags=mem.get("tags"),
            category=mem.get("category"),
            metadata=mem.get("metadata"),
        )
        results.append(result)

    return {
        "status": "bulk_stored",
        "count": len(results),
        "results": results,
    }
6
memory-bank-mcp/requirements.txt
Executable file
6
memory-bank-mcp/requirements.txt
Executable file
|
|
@ -0,0 +1,6 @@
|
||||||
|
mcp[cli]>=1.0.0
|
||||||
|
httpx>=0.27.0
|
||||||
|
pydantic>=2.0.0
|
||||||
|
uvicorn>=0.30.0
|
||||||
|
starlette>=0.38.0
|
||||||
|
aiofiles>=24.1.0
|
||||||
37
puppeteer-mcp/Dockerfile
Executable file
37
puppeteer-mcp/Dockerfile
Executable file
|
|
@ -0,0 +1,37 @@
|
||||||
|
FROM python:3.12-slim-bookworm

# Install Chromium and dependencies for headless browser
RUN apt-get update && apt-get install -y --no-install-recommends \
    chromium \
    chromium-sandbox \
    fonts-liberation \
    libnss3 \
    libatk-bridge2.0-0 \
    libdrm2 \
    libxkbcommon0 \
    libgbm1 \
    libasound2 \
    libatspi2.0-0 \
    libgtk-3-0 \
    && rm -rf /var/lib/apt/lists/*

# Point pyppeteer to system Chromium
# (revision 0 stops pyppeteer from downloading its own Chromium build)
ENV PYPPETEER_CHROMIUM_REVISION=0
ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium

WORKDIR /app

# Install Python deps first so source edits don't bust the pip layer cache.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY puppeteer_mcp.py .
COPY entrypoint.py .

ENV PORT=8800

EXPOSE 8800

# NOTE(review): urllib.request.urlopen raises on non-2xx responses; confirm
# a plain GET on /mcp returns 2xx for the streamable-http transport,
# otherwise this healthcheck will always report unhealthy.
HEALTHCHECK --interval=30s --timeout=5s --start-period=20s \
    CMD python3 -c "import urllib.request; urllib.request.urlopen('http://localhost:8800/mcp', timeout=5)"

CMD ["python3", "entrypoint.py"]
12
puppeteer-mcp/entrypoint.py
Executable file
12
puppeteer-mcp/entrypoint.py
Executable file
|
|
@ -0,0 +1,12 @@
|
||||||
|
import os
from puppeteer_mcp import mcp
from mcp.server.fastmcp.server import TransportSecuritySettings

# Listen on all interfaces so sibling compose services (the gateway) can
# connect to this container.
mcp.settings.host = "0.0.0.0"
mcp.settings.port = int(os.environ.get("PORT", "8800"))
# The gateway reaches this server via its docker-network hostname
# (mcp-puppeteer), which DNS-rebinding protection would otherwise reject.
mcp.settings.transport_security = TransportSecuritySettings(
    enable_dns_rebinding_protection=False,
)

if __name__ == "__main__":
    mcp.run(transport="streamable-http")
335
puppeteer-mcp/puppeteer_mcp.py
Executable file
335
puppeteer-mcp/puppeteer_mcp.py
Executable file
|
|
@ -0,0 +1,335 @@
|
||||||
|
"""
|
||||||
|
Puppeteer MCP Server
|
||||||
|
====================
|
||||||
|
MCP server providing headless browser automation via Pyppeteer.
|
||||||
|
Supports navigation, screenshots, page content extraction, form filling,
|
||||||
|
clicking elements, JavaScript evaluation, and PDF generation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
from typing import Optional, List, Dict, Any
|
||||||
|
|
||||||
|
from mcp.server.fastmcp import FastMCP
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Configuration
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# Launch flags for Chromium, comma-separated in the environment; defaults
# suit a containerized, sandboxless headless browser.
CHROMIUM_ARGS = os.environ.get(
    "CHROMIUM_ARGS",
    "--no-sandbox,--disable-setuid-sandbox,--disable-dev-shm-usage,--disable-gpu",
).split(",")

# Default tab dimensions and navigation timeout (milliseconds).
DEFAULT_VIEWPORT_WIDTH = int(os.environ.get("VIEWPORT_WIDTH", "1280"))
DEFAULT_VIEWPORT_HEIGHT = int(os.environ.get("VIEWPORT_HEIGHT", "720"))
DEFAULT_TIMEOUT = int(os.environ.get("PAGE_TIMEOUT", "30000"))
|
# ---------------------------------------------------------------------------
|
||||||
|
# MCP Server
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# Server instance; the tool functions below register themselves on it
# via @mcp.tool().
mcp = FastMCP("puppeteer_mcp")

# ---------------------------------------------------------------------------
# Browser management
# ---------------------------------------------------------------------------

# Lazily-launched shared browser process (created on first tool call,
# see _get_browser).
_browser = None
# Open tabs keyed by caller-supplied page_id; closed tabs may linger here
# until _get_page replaces them or close_page removes them.
_pages: Dict[str, Any] = {}
|
|
||||||
|
async def _get_browser():
    """Return the shared browser, launching Chromium on first use.

    Relaunches when the cached instance has no process handle.
    """
    global _browser
    needs_launch = _browser is None or not _browser.process
    if needs_launch:
        from pyppeteer import launch

        # Signal handling is disabled — presumably because the server loop
        # does not run on the main thread, where handlers must be set.
        # TODO(review): confirm.
        _browser = await launch(
            headless=True,
            args=CHROMIUM_ARGS,
            handleSIGINT=False,
            handleSIGTERM=False,
            handleSIGHUP=False,
        )
    return _browser
|
||||||
|
|
||||||
|
async def _get_page(page_id: str = "default") -> Any:
    """Return the tab registered under *page_id*, creating it if needed.

    A previously-closed tab under the same id is transparently replaced
    with a fresh one at the default viewport size.
    """
    browser = await _get_browser()

    existing = _pages.get(page_id)
    if existing is None or existing.isClosed():
        fresh = await browser.newPage()
        await fresh.setViewport({
            "width": DEFAULT_VIEWPORT_WIDTH,
            "height": DEFAULT_VIEWPORT_HEIGHT,
        })
        _pages[page_id] = fresh

    return _pages[page_id]
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Tools
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def navigate(
    url: str,
    page_id: str = "default",
    wait_until: str = "networkidle2",
) -> Dict[str, Any]:
    """
    Navigate to a URL in the browser.

    Args:
        url: The URL to navigate to
        page_id: Identifier for the browser tab/page (default: "default")
        wait_until: When to consider navigation complete: 'load', 'domcontentloaded', 'networkidle0', 'networkidle2'
    """
    page = await _get_page(page_id)

    # Consistency fix: like the other tools in this module, report
    # failures (timeouts, DNS errors, bad URLs) as an error payload
    # instead of letting the exception escape the tool call.
    try:
        response = await page.goto(url, waitUntil=wait_until, timeout=DEFAULT_TIMEOUT)
    except Exception as e:
        return {"error": f"Navigation to '{url}' failed: {str(e)}"}

    return {
        "status": "navigated",
        "url": page.url,
        # response can be None (e.g. in-page hash navigation)
        "status_code": response.status if response else None,
        "title": await page.title(),
    }
|
||||||
|
|
||||||
|
@mcp.tool()
async def screenshot(
    page_id: str = "default",
    full_page: bool = False,
    selector: Optional[str] = None,
) -> Dict[str, Any]:
    """
    Take a screenshot of the current page or a specific element.

    Args:
        page_id: Page identifier (default: "default")
        full_page: Whether to capture the full scrollable page
        selector: Optional CSS selector to screenshot a specific element
    """
    page = await _get_page(page_id)

    # Consistency fix: surface capture failures (detached element, closed
    # page) as an error payload like the other tools do, instead of
    # propagating the raw exception.
    try:
        if selector:
            element = await page.querySelector(selector)
            if not element:
                return {"error": f"Element not found: {selector}"}
            screenshot_bytes = await element.screenshot()
        else:
            screenshot_bytes = await page.screenshot(
                {"encoding": "binary", "fullPage": full_page}
            )
    except Exception as e:
        return {"error": f"Screenshot failed: {str(e)}"}

    b64 = base64.b64encode(screenshot_bytes).decode("utf-8")

    return {
        "status": "screenshot_taken",
        "format": "png",
        "base64": b64,
        "url": page.url,
    }
|
||||||
|
|
||||||
|
@mcp.tool()
async def get_page_content(
    page_id: str = "default",
    content_type: str = "text",
) -> Dict[str, Any]:
    """
    Get the content of the current page.

    Args:
        page_id: Page identifier (default: "default")
        content_type: 'text' for visible text, 'html' for full HTML source
    """
    max_chars = 50000  # keep tool responses bounded
    page = await _get_page(page_id)

    if content_type == "html":
        content = await page.content()
    else:
        # Any value other than "html" falls back to visible text.
        content = await page.evaluate("() => document.body.innerText")

    return {
        "url": page.url,
        "title": await page.title(),
        "content_type": content_type,
        "content": content[:max_chars],
        "truncated": len(content) > max_chars,
    }
|
||||||
|
|
||||||
|
@mcp.tool()
async def click(
    selector: str,
    page_id: str = "default",
) -> Dict[str, Any]:
    """
    Click an element on the page.

    Args:
        selector: CSS selector for the element to click
        page_id: Page identifier (default: "default")
    """
    page = await _get_page(page_id)

    try:
        await page.waitForSelector(selector, timeout=5000)
        await page.click(selector)
        # Give any resulting navigation or DOM update a moment to land.
        await asyncio.sleep(0.5)
    except Exception as e:
        return {"error": f"Failed to click '{selector}': {str(e)}"}

    return {
        "status": "clicked",
        "selector": selector,
        "url": page.url,
    }
|
||||||
|
|
||||||
|
@mcp.tool()
async def type_text(
    selector: str,
    text: str,
    page_id: str = "default",
    delay: int = 50,
) -> Dict[str, Any]:
    """
    Type text into an input field.

    Args:
        selector: CSS selector for the input element
        text: Text to type
        page_id: Page identifier (default: "default")
        delay: Delay between key presses in ms (default: 50)
    """
    page = await _get_page(page_id)

    try:
        await page.waitForSelector(selector, timeout=5000)
        await page.type(selector, text, delay=delay)
    except Exception as e:
        return {"error": f"Failed to type into '{selector}': {str(e)}"}

    return {
        "status": "typed",
        "selector": selector,
        "text_length": len(text),
    }
|
||||||
|
|
||||||
|
@mcp.tool()
async def evaluate_javascript(
    script: str,
    page_id: str = "default",
) -> Dict[str, Any]:
    """
    Execute JavaScript code in the browser context.

    Args:
        script: JavaScript code to evaluate
        page_id: Page identifier (default: "default")
    """
    page = await _get_page(page_id)

    try:
        outcome = await page.evaluate(script)
    except Exception as e:
        return {"error": f"JS evaluation failed: {str(e)}"}

    return {"status": "evaluated", "result": outcome}
|
||||||
|
|
||||||
|
@mcp.tool()
async def generate_pdf(
    page_id: str = "default",
    format: str = "A4",
    landscape: bool = False,
    print_background: bool = True,
) -> Dict[str, Any]:
    """
    Generate a PDF of the current page.

    Args:
        page_id: Page identifier (default: "default")
        format: Paper format: 'A4', 'Letter', 'Legal', etc.
        landscape: Whether to use landscape orientation
        print_background: Whether to print background graphics
    """
    page = await _get_page(page_id)

    pdf_options = {
        "format": format,
        "landscape": landscape,
        "printBackground": print_background,
    }
    try:
        pdf_bytes = await page.pdf(pdf_options)
    except Exception as e:
        return {"error": f"PDF generation failed: {str(e)}"}

    return {
        "status": "pdf_generated",
        "format": format,
        "base64": base64.b64encode(pdf_bytes).decode("utf-8"),
        "url": page.url,
    }
|
||||||
|
|
||||||
|
@mcp.tool()
async def wait_for_selector(
    selector: str,
    page_id: str = "default",
    timeout: int = 10000,
    visible: bool = True,
) -> Dict[str, Any]:
    """
    Wait for an element to appear on the page.

    Args:
        selector: CSS selector to wait for
        page_id: Page identifier (default: "default")
        timeout: Maximum wait time in ms (default: 10000)
        visible: Whether element must be visible (default: True)
    """
    page = await _get_page(page_id)

    try:
        await page.waitForSelector(selector, timeout=timeout, visible=visible)
    except Exception as e:
        return {"error": f"Timeout waiting for '{selector}': {str(e)}"}

    return {"status": "found", "selector": selector}
|
||||||
|
|
||||||
|
@mcp.tool()
async def list_pages() -> Dict[str, Any]:
    """List all open browser pages/tabs."""
    open_pages: Dict[str, Any] = {}
    for pid, page in _pages.items():
        if page.isClosed():
            continue
        open_pages[pid] = {
            "url": page.url,
            "title": await page.title(),
        }
    return {"pages": open_pages, "count": len(open_pages)}
|
||||||
|
|
||||||
|
@mcp.tool()
async def close_page(page_id: str = "default") -> Dict[str, Any]:
    """
    Close a browser page/tab.

    Args:
        page_id: Page identifier to close (default: "default")
    """
    if page_id not in _pages:
        return {"error": f"Page '{page_id}' not found"}

    page = _pages[page_id]
    if not page.isClosed():
        await page.close()
    # Forget the handle only after a successful close.
    del _pages[page_id]
    return {"status": "closed", "page_id": page_id}
||||||
6
puppeteer-mcp/requirements.txt
Executable file
6
puppeteer-mcp/requirements.txt
Executable file
|
|
@ -0,0 +1,6 @@
|
||||||
|
mcp[cli]>=1.0.0
|
||||||
|
httpx>=0.27.0
|
||||||
|
pydantic>=2.0.0
|
||||||
|
uvicorn>=0.30.0
|
||||||
|
starlette>=0.38.0
|
||||||
|
pyppeteer>=2.0.0
|
||||||
18
sequential-thinking-mcp/Dockerfile
Executable file
18
sequential-thinking-mcp/Dockerfile
Executable file
|
|
@ -0,0 +1,18 @@
|
||||||
|
FROM python:3.12-slim-bookworm

WORKDIR /app

# Install Python deps first so source edits don't bust the pip layer cache.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY sequential_thinking_mcp.py .
COPY entrypoint.py .

ENV PORT=8900

EXPOSE 8900

# NOTE(review): urllib.request.urlopen raises on non-2xx responses; confirm
# a plain GET on /mcp returns 2xx for the streamable-http transport,
# otherwise this healthcheck will always report unhealthy.
HEALTHCHECK --interval=30s --timeout=5s --start-period=15s \
    CMD python3 -c "import urllib.request; urllib.request.urlopen('http://localhost:8900/mcp', timeout=5)"

CMD ["python3", "entrypoint.py"]
||||||
12
sequential-thinking-mcp/entrypoint.py
Executable file
12
sequential-thinking-mcp/entrypoint.py
Executable file
|
|
@ -0,0 +1,12 @@
|
||||||
|
import os
from sequential_thinking_mcp import mcp
from mcp.server.fastmcp.server import TransportSecuritySettings

# Listen on all interfaces so sibling compose services (the gateway) can
# connect to this container.
mcp.settings.host = "0.0.0.0"
mcp.settings.port = int(os.environ.get("PORT", "8900"))
# The gateway reaches this server via its docker-network hostname
# (mcp-sequential-thinking), which DNS-rebinding protection would
# otherwise reject.
mcp.settings.transport_security = TransportSecuritySettings(
    enable_dns_rebinding_protection=False,
)

if __name__ == "__main__":
    mcp.run(transport="streamable-http")
4
sequential-thinking-mcp/requirements.txt
Executable file
4
sequential-thinking-mcp/requirements.txt
Executable file
|
|
@ -0,0 +1,4 @@
|
||||||
|
mcp[cli]>=1.0.0
|
||||||
|
pydantic>=2.0.0
|
||||||
|
uvicorn>=0.30.0
|
||||||
|
starlette>=0.38.0
|
||||||
376
sequential-thinking-mcp/sequential_thinking_mcp.py
Executable file
376
sequential-thinking-mcp/sequential_thinking_mcp.py
Executable file
|
|
@ -0,0 +1,376 @@
|
||||||
|
"""
|
||||||
|
Sequential Thinking MCP Server
|
||||||
|
===============================
|
||||||
|
MCP server that provides structured, step-by-step thinking tools for
|
||||||
|
complex problem solving. Supports creating thinking chains, branching
|
||||||
|
hypotheses, revising earlier steps, and synthesizing conclusions.
|
||||||
|
Helps LLMs reason through multi-step problems methodically.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
from typing import Optional, List, Dict, Any
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
from mcp.server.fastmcp import FastMCP
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# MCP Server
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# Server instance; the tool functions below register themselves on it
# via @mcp.tool().
mcp = FastMCP("sequential_thinking_mcp")

# ---------------------------------------------------------------------------
# In-memory session storage
# ---------------------------------------------------------------------------

# session_id -> session dict (see start_thinking_session for the schema).
# NOTE(review): purely in-process — all sessions are lost on restart, and
# nothing in the visible code ever removes finished sessions.
_sessions: Dict[str, Dict[str, Any]] = {}
|
||||||
|
class ThoughtType(str, Enum):
    # The kinds of reasoning steps a thought may be tagged with.
    # str-valued so members compare equal to the plain strings that
    # the tool API accepts.
    OBSERVATION = "observation"
    HYPOTHESIS = "hypothesis"
    ANALYSIS = "analysis"
    CONCLUSION = "conclusion"
    REVISION = "revision"
    QUESTION = "question"
    EVIDENCE = "evidence"
    COUNTER_ARGUMENT = "counter_argument"
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Tools
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def start_thinking_session(
    topic: str,
    context: Optional[str] = None,
    session_id: Optional[str] = None,
) -> Dict[str, Any]:
    """
    Start a new sequential thinking session for a topic.

    Args:
        topic: The main topic or problem to think through
        context: Optional background context or constraints
        session_id: Optional custom session ID (auto-generated if not provided)
    """
    # Millisecond timestamps make auto-generated ids unique enough here.
    sid = session_id if session_id else f"think_{int(time.time() * 1000)}"
    now = datetime.now(timezone.utc).isoformat()

    session = {
        "session_id": sid,
        "topic": topic,
        "context": context,
        "created_at": now,
        "updated_at": now,
        "thoughts": [],
        "branches": {},
        "status": "active",
        "conclusion": None,
    }
    _sessions[sid] = session

    return {
        "session_id": sid,
        "topic": topic,
        "status": "active",
        "message": "Thinking session started. Add thoughts with add_thought.",
    }
|
||||||
|
|
||||||
|
@mcp.tool()
async def add_thought(
    session_id: str,
    thought: str,
    thought_type: str = "analysis",
    confidence: Optional[float] = None,
    references_steps: Optional[List[int]] = None,
    branch: Optional[str] = None,
) -> Dict[str, Any]:
    """
    Add a thought/reasoning step to a thinking session.

    Args:
        session_id: The thinking session ID
        thought: The thought or reasoning content
        thought_type: Type of thought: 'observation', 'hypothesis', 'analysis', 'conclusion', 'revision', 'question', 'evidence', 'counter_argument'
        confidence: Optional confidence level 0.0-1.0
        references_steps: Optional list of step numbers this thought builds on
        branch: Optional branch name for alternative reasoning paths
    """
    if session_id not in _sessions:
        return {"error": f"Session '{session_id}' not found"}

    # Robustness fix: reject types outside the documented set instead of
    # silently storing arbitrary strings (the ThoughtType enum existed
    # but was never consulted).
    valid_types = {t.value for t in ThoughtType}
    if thought_type not in valid_types:
        return {
            "error": f"Invalid thought_type '{thought_type}'",
            "valid_types": sorted(valid_types),
        }

    session = _sessions[session_id]
    now = datetime.now(timezone.utc).isoformat()

    # Steps are numbered independently per chain (main or each branch).
    if branch:
        target_list = session["branches"].setdefault(branch, [])
    else:
        target_list = session["thoughts"]
    step_num = len(target_list) + 1

    thought_entry = {
        "step": step_num,
        "thought": thought,
        "type": thought_type,
        "confidence": confidence,
        "references": references_steps or [],
        "branch": branch,
        "timestamp": now,
    }

    target_list.append(thought_entry)
    session["updated_at"] = now

    return {
        "session_id": session_id,
        "step": step_num,
        "branch": branch,
        "type": thought_type,
        # Counts the main chain only, matching the original behavior.
        "total_steps": len(session["thoughts"]),
        "total_branches": len(session["branches"]),
    }
|
||||||
|
|
||||||
|
@mcp.tool()
async def revise_thought(
    session_id: str,
    step_number: int,
    revised_thought: str,
    reason: str,
    branch: Optional[str] = None,
) -> Dict[str, Any]:
    """
    Revise an earlier thought step with new reasoning.

    Args:
        session_id: The thinking session ID
        step_number: The step number to revise
        revised_thought: The new thought content
        reason: Reason for the revision
        branch: Optional branch name if revising a branched thought
    """
    if session_id not in _sessions:
        return {"error": f"Session '{session_id}' not found"}

    session = _sessions[session_id]
    chain = session["branches"].get(branch, []) if branch else session["thoughts"]

    target = next((e for e in chain if e["step"] == step_number), None)
    if target is None:
        return {"error": f"Step {step_number} not found"}

    # Keep the pre-revision text so history stays recoverable.
    target["original_thought"] = target["thought"]
    target["thought"] = revised_thought
    target["revision_reason"] = reason
    target["revised_at"] = datetime.now(timezone.utc).isoformat()

    # Record the revision as its own step in the same chain.
    await add_thought(
        session_id=session_id,
        thought=f"[Revision of step {step_number}] {reason}: {revised_thought}",
        thought_type="revision",
        references_steps=[step_number],
        branch=branch,
    )

    return {
        "status": "revised",
        "step": step_number,
        "reason": reason,
    }
|
||||||
|
|
||||||
|
@mcp.tool()
async def get_thinking_chain(
    session_id: str,
    branch: Optional[str] = None,
    include_revisions: bool = True,
) -> Dict[str, Any]:
    """
    Get the full chain of thoughts for a session.

    Args:
        session_id: The thinking session ID
        branch: Optional branch name to get (None for main chain)
        include_revisions: Whether to include revision history
    """
    session = _sessions.get(session_id)
    if session is None:
        return {"error": f"Session '{session_id}' not found"}

    # A named branch falls back to an empty chain when it does not exist.
    chain = session["branches"].get(branch, []) if branch else session["thoughts"]

    if not include_revisions:
        # Strip the revision-tracking keys from each thought.
        hidden = ("original_thought", "revision_reason", "revised_at")
        chain = [
            {key: value for key, value in thought.items() if key not in hidden}
            for thought in chain
        ]

    return {
        "session_id": session_id,
        "topic": session["topic"],
        "context": session["context"],
        "branch": branch,
        "thoughts": chain,
        "total_steps": len(chain),
        "branches_available": list(session["branches"].keys()),
        "status": session["status"],
        "conclusion": session["conclusion"],
    }
|
@mcp.tool()
async def synthesize_conclusion(
    session_id: str,
    conclusion: str,
    confidence: Optional[float] = None,
    key_insights: Optional[List[str]] = None,
) -> Dict[str, Any]:
    """
    Synthesize a conclusion from the thinking chain.

    Args:
        session_id: The thinking session ID
        conclusion: The synthesized conclusion
        confidence: Overall confidence level 0.0-1.0
        key_insights: Optional list of key insights from the thinking process
    """
    session = _sessions.get(session_id)
    if session is None:
        return {"error": f"Session '{session_id}' not found"}

    timestamp = datetime.now(timezone.utc).isoformat()

    # Record the conclusion before the final thought is appended, so
    # "based_on_steps" counts only the reasoning steps that led to it.
    session["conclusion"] = {
        "text": conclusion,
        "confidence": confidence,
        "key_insights": key_insights or [],
        "concluded_at": timestamp,
        "based_on_steps": len(session["thoughts"]),
        "branches_considered": list(session["branches"].keys()),
    }
    session["status"] = "concluded"
    session["updated_at"] = timestamp

    # Append the conclusion to the chain as a final "conclusion" thought.
    await add_thought(
        session_id=session_id,
        thought=conclusion,
        thought_type="conclusion",
        confidence=confidence,
    )

    return {
        "session_id": session_id,
        "status": "concluded",
        "conclusion": session["conclusion"],
    }
|
@mcp.tool()
async def compare_branches(
    session_id: str,
    branch_names: Optional[List[str]] = None,
) -> Dict[str, Any]:
    """
    Compare different reasoning branches in a session.

    Args:
        session_id: The thinking session ID
        branch_names: Optional list of branches to compare (all if not specified)
    """
    if session_id not in _sessions:
        return {"error": f"Session '{session_id}' not found"}

    session = _sessions[session_id]
    all_branches = session["branches"]
    selected = branch_names if branch_names else list(all_branches.keys())

    def _summary(chain: List[Dict]) -> Dict[str, Any]:
        # One summary row per chain: size, type histogram, mean confidence.
        return {
            "steps": len(chain),
            "types": _count_types(chain),
            "avg_confidence": _avg_confidence(chain),
        }

    comparison: Dict[str, Any] = {"main_chain": _summary(session["thoughts"])}

    # Unknown branch names are silently skipped, matching lookup semantics.
    for name in selected:
        if name in all_branches:
            comparison[name] = _summary(all_branches[name])

    return {
        "session_id": session_id,
        "comparison": comparison,
    }
||||||
|
def _count_types(thoughts: List[Dict]) -> Dict[str, int]:
|
||||||
|
counts: Dict[str, int] = {}
|
||||||
|
for t in thoughts:
|
||||||
|
tt = t.get("type", "unknown")
|
||||||
|
counts[tt] = counts.get(tt, 0) + 1
|
||||||
|
return counts
|
||||||
|
|
||||||
|
|
||||||
|
def _avg_confidence(thoughts: List[Dict]) -> Optional[float]:
|
||||||
|
confs = [t["confidence"] for t in thoughts if t.get("confidence") is not None]
|
||||||
|
if not confs:
|
||||||
|
return None
|
||||||
|
return round(sum(confs) / len(confs), 3)
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def list_sessions(
    status: Optional[str] = None,
    limit: int = 20,
) -> Dict[str, Any]:
    """
    List all thinking sessions.

    Args:
        status: Optional filter by status: 'active' or 'concluded'
        limit: Maximum sessions to return
    """
    # A falsy status (None or "") disables filtering entirely.
    summaries = [
        {
            "session_id": sid,
            "topic": session["topic"],
            "status": session["status"],
            "total_steps": len(session["thoughts"]),
            "branches": len(session["branches"]),
            "created_at": session["created_at"],
            "updated_at": session["updated_at"],
        }
        for sid, session in _sessions.items()
        if not status or session["status"] == status
    ]

    # Most recently touched sessions first.
    summaries.sort(key=lambda item: item["updated_at"], reverse=True)

    return {
        "total": len(summaries),
        "sessions": summaries[:limit],
    }
|
||||||
Loading…
Reference in a new issue