Initial commit: hardened DeerFlow factory

Vendored deer-flow upstream (bytedance/deer-flow) plus prompt-injection
hardening:

- New deerflow.security package: content_delimiter, html_cleaner,
  sanitizer (8 layers — invisible chars, control chars, symbols, NFC,
  PUA, tag chars, horizontal whitespace collapse with newline/tab
  preservation, length cap)
- New deerflow.community.searx package: web_search, web_fetch,
  image_search backed by a private SearX instance, every external
  string sanitized and wrapped in <<<EXTERNAL_UNTRUSTED_CONTENT>>>
  delimiters
- All native community web providers (ddg_search, tavily, exa,
  firecrawl, jina_ai, infoquest, image_search) replaced with hard-fail
  stubs that raise NativeWebToolDisabledError at import time, so a
  misconfigured tool.use path fails loud rather than silently falling
  back to unsanitized output
- Native client back-doors (jina_client.py, infoquest_client.py)
  stubbed too
- Native-tool tests quarantined under tests/_disabled_native/
  (collect_ignore_glob via local conftest.py)
- Sanitizer Layer 7 fix: only collapse horizontal whitespace, preserve
  newlines and tabs so list/table structure survives
- Hardened runtime config.yaml references only the searx-backed tools
- Factory overlay (backend/) kept in sync with deer-flow tree as a
  reference / source

See HARDENING.md for the full audit trail and verification steps.
This commit is contained in:
2026-04-12 14:23:57 +02:00
commit 6de0bf9f5b
889 changed files with 173052 additions and 0 deletions

163
deer-flow/scripts/check.py Normal file
View File

@@ -0,0 +1,163 @@
#!/usr/bin/env python3
"""Cross-platform dependency checker for DeerFlow."""
from __future__ import annotations
import shutil
import subprocess
import sys
def configure_stdio() -> None:
    """Switch stdout/stderr to UTF-8 so Unicode status markers render on Windows."""
    streams = (getattr(sys, name, None) for name in ("stdout", "stderr"))
    for stream in streams:
        reconfigure = getattr(stream, "reconfigure", None)
        if reconfigure is None:
            # Stream replaced by something without reconfigure (e.g. a capture shim).
            continue
        try:
            reconfigure(encoding="utf-8", errors="replace")
        except (OSError, ValueError):
            # Best effort: an unconfigurable stream is not fatal.
            pass
def run_command(command: list[str]) -> str | None:
    """Execute *command*; return its trimmed output, or None when it fails.

    Falls back to stderr when stdout is empty (some tools, e.g. nginx,
    report their version on stderr).
    """
    try:
        completed = subprocess.run(
            command,
            capture_output=True,
            text=True,
            check=True,
            shell=False,
        )
    except (OSError, subprocess.CalledProcessError):
        # Missing binary or non-zero exit: the caller treats None as "unknown".
        return None
    stdout = completed.stdout.strip()
    return stdout if stdout else completed.stderr.strip()
def find_pnpm_command() -> list[str] | None:
    """Locate an invocable pnpm command, falling back to the Corepack shim."""
    candidates: list[list[str]] = [["pnpm"], ["pnpm.cmd"]]
    if shutil.which("corepack"):
        candidates.append(["corepack", "pnpm"])
    # First candidate whose executable resolves on PATH wins.
    return next(
        (candidate for candidate in candidates if shutil.which(candidate[0])),
        None,
    )
def parse_node_major(version_text: str) -> int | None:
    """Extract the major version from Node's ``-v`` output ("v22.1.0" -> 22).

    Returns None when the text does not start with a numeric major version.
    """
    cleaned = version_text.strip()
    # Node prefixes its version with a single "v"; drop it if present.
    if cleaned[:1] == "v":
        cleaned = cleaned[1:]
    major, _sep, _rest = cleaned.partition(".")
    return int(major) if major.isdigit() else None
def _check_node() -> bool:
    """Report Node.js status (22+ required); return True when the check failed."""
    print("Checking Node.js...")
    node_path = shutil.which("node")
    if not node_path:
        print(" FAIL Node.js not found (version 22+ required)")
        print(" Install from: https://nodejs.org/")
        return True
    node_version = run_command(["node", "-v"])
    if not node_version:
        print(" INFO Unable to determine Node.js version")
        print(" Install from: https://nodejs.org/")
        return True
    major = parse_node_major(node_version)
    if major is not None and major >= 22:
        print(f" OK Node.js {node_version.lstrip('v')} (>= 22 required)")
        return False
    print(
        f" FAIL Node.js {node_version.lstrip('v')} found, but version 22+ is required"
    )
    print(" Install from: https://nodejs.org/")
    return True


def _check_pnpm() -> bool:
    """Report pnpm status (direct binary or Corepack); return True on failure."""
    print("Checking pnpm...")
    pnpm_command = find_pnpm_command()
    if not pnpm_command:
        print(" FAIL pnpm not found")
        print(" Install: npm install -g pnpm")
        print(" Or enable Corepack: corepack enable")
        print(" Or visit: https://pnpm.io/installation")
        return True
    pnpm_version = run_command([*pnpm_command, "-v"])
    if not pnpm_version:
        print(" INFO Unable to determine pnpm version")
        return True
    if pnpm_command[0] == "corepack":
        print(f" OK pnpm {pnpm_version} (via Corepack)")
    else:
        print(f" OK pnpm {pnpm_version}")
    return False


def _check_uv() -> bool:
    """Report uv status; return True when the check failed."""
    print("Checking uv...")
    if not shutil.which("uv"):
        print(" FAIL uv not found")
        print(" Visit the official installation guide for your platform:")
        print(" https://docs.astral.sh/uv/getting-started/installation/")
        return True
    uv_version_text = run_command(["uv", "--version"])
    if not uv_version_text:
        print(" INFO Unable to determine uv version")
        return True
    # `uv --version` prints "uv X.Y.Z"; take the second token when present.
    uv_version_parts = uv_version_text.split()
    uv_version = uv_version_parts[1] if len(uv_version_parts) > 1 else uv_version_text
    print(f" OK uv {uv_version}")
    return False


def _check_nginx() -> bool:
    """Report nginx status; return True when nginx is missing entirely."""
    print("Checking nginx...")
    if shutil.which("nginx"):
        # `nginx -v` emits e.g. "nginx version: nginx/1.25.3" (on stderr,
        # which run_command falls back to); the version follows the "/".
        nginx_version_text = run_command(["nginx", "-v"])
        if nginx_version_text and "/" in nginx_version_text:
            nginx_version = nginx_version_text.split("/", 1)[1]
            print(f" OK nginx {nginx_version}")
        else:
            # Present but unparsable output is informational, not a failure.
            print(" INFO nginx (version unknown)")
        return False
    print(" FAIL nginx not found")
    print(" macOS: brew install nginx")
    print(" Ubuntu: sudo apt install nginx")
    print(" Windows: use WSL for local mode or use Docker mode")
    print(" Or visit: https://nginx.org/en/download.html")
    return True


def main() -> int:
    """Run every dependency check and return 0 when all pass, 1 otherwise."""
    configure_stdio()
    print("==========================================")
    print(" Checking Required Dependencies")
    print("==========================================")
    print()
    # Run every check even after a failure so the user sees all problems at once
    # (helpers are evaluated first on each line so `or` cannot short-circuit them).
    failed = _check_node()
    print()
    failed = _check_pnpm() or failed
    print()
    failed = _check_uv() or failed
    print()
    failed = _check_nginx() or failed
    print()
    if not failed:
        print("==========================================")
        print(" OK All dependencies are installed!")
        print("==========================================")
        print()
        print("You can now run:")
        print(" make install - Install project dependencies")
        print(" make setup - Create a minimal working config (recommended)")
        print(" make config - Copy the full config template (manual setup)")
        print(" make doctor - Verify config and dependency health")
        print(" make dev - Start development server")
        print(" make start - Start production server")
        return 0
    print("==========================================")
    print(" FAIL Some dependencies are missing")
    print("==========================================")
    print()
    print("Please install the missing tools and run 'make check' again.")
    return 1
# Script entry point: exit status 0 when all dependencies are present, 1 otherwise.
if __name__ == "__main__":
    sys.exit(main())

85
deer-flow/scripts/check.sh Executable file
View File

@@ -0,0 +1,85 @@
#!/usr/bin/env bash
#
# check.sh — verify the tools DeerFlow needs: Node.js 22+, pnpm, uv, nginx.
# Kept consistent with scripts/check.py so both entry points detect the same
# toolchains (including pnpm via Corepack) and print the same guidance.
set -euo pipefail

echo "=========================================="
echo " Checking Required Dependencies"
echo "=========================================="
echo ""

FAILED=0

echo "Checking Node.js..."
if command -v node >/dev/null 2>&1; then
    NODE_VERSION=$(node -v | sed 's/v//')
    NODE_MAJOR=$(echo "$NODE_VERSION" | cut -d. -f1)
    if [ "$NODE_MAJOR" -ge 22 ]; then
        echo " ✓ Node.js $NODE_VERSION (>= 22 required)"
    else
        echo " ✗ Node.js $NODE_VERSION found, but version 22+ is required"
        echo " Install from: https://nodejs.org/"
        FAILED=1
    fi
else
    echo " ✗ Node.js not found (version 22+ required)"
    echo " Install from: https://nodejs.org/"
    FAILED=1
fi
echo ""

echo "Checking pnpm..."
# Prefer a direct pnpm binary; fall back to the Corepack shim so this shell
# checker agrees with find_pnpm_command() in scripts/check.py.
if command -v pnpm >/dev/null 2>&1; then
    PNPM_VERSION=$(pnpm -v)
    echo " ✓ pnpm $PNPM_VERSION"
elif command -v corepack >/dev/null 2>&1 && PNPM_VERSION=$(corepack pnpm -v 2>/dev/null); then
    echo " ✓ pnpm $PNPM_VERSION (via Corepack)"
else
    echo " ✗ pnpm not found"
    echo " Install: npm install -g pnpm"
    echo " Or enable Corepack: corepack enable"
    echo " Or visit: https://pnpm.io/installation"
    FAILED=1
fi
echo ""

echo "Checking uv..."
if command -v uv >/dev/null 2>&1; then
    UV_VERSION=$(uv --version | awk '{print $2}')
    echo " ✓ uv $UV_VERSION"
else
    echo " ✗ uv not found"
    echo " Install: curl -LsSf https://astral.sh/uv/install.sh | sh"
    echo " Or visit: https://docs.astral.sh/uv/getting-started/installation/"
    FAILED=1
fi
echo ""

echo "Checking nginx..."
if command -v nginx >/dev/null 2>&1; then
    # nginx prints its version on stderr, hence the 2>&1 redirect.
    NGINX_VERSION=$(nginx -v 2>&1 | awk -F'/' '{print $2}')
    echo " ✓ nginx $NGINX_VERSION"
else
    echo " ✗ nginx not found"
    echo " macOS: brew install nginx"
    echo " Ubuntu: sudo apt install nginx"
    echo " Windows: use WSL for local mode or use Docker mode"
    echo " Or visit: https://nginx.org/en/download.html"
    FAILED=1
fi
echo ""

if [ "$FAILED" -eq 0 ]; then
    echo "=========================================="
    echo " ✓ All dependencies are installed!"
    echo "=========================================="
    echo ""
    echo "You can now run:"
    echo " make install - Install project dependencies"
    echo " make setup - Create a minimal working config (recommended)"
    echo " make config - Copy the full config template (manual setup)"
    echo " make doctor - Verify config and dependency health"
    echo " make dev - Start development server"
    echo " make start - Start production server"
else
    echo "=========================================="
    echo " ✗ Some dependencies are missing"
    echo "=========================================="
    echo ""
    echo "Please install the missing tools and run 'make check' again."
    exit 1
fi

View File

@@ -0,0 +1,95 @@
#!/usr/bin/env bash
#
# cleanup-containers.sh - Clean up DeerFlow sandbox containers
#
# This script cleans up both Docker and Apple Container runtime containers
# to ensure compatibility across different container runtimes.
#
set -e

PREFIX="${1:-deer-flow-sandbox}"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

echo "Cleaning up sandbox containers with prefix: ${PREFIX}"

# Function to clean up Docker containers
cleanup_docker() {
    if command -v docker &> /dev/null; then
        echo -n "Checking Docker containers... "
        DOCKER_CONTAINERS=$(docker ps -q --filter "name=${PREFIX}" 2>/dev/null || echo "")
        if [ -n "$DOCKER_CONTAINERS" ]; then
            echo ""
            echo "Found Docker containers to clean up:"
            docker ps --filter "name=${PREFIX}" --format "table {{.ID}}\t{{.Names}}\t{{.Status}}"
            echo "Stopping Docker containers..."
            # Best-effort stop: a container that exited meanwhile must not abort the run.
            echo "$DOCKER_CONTAINERS" | xargs docker stop 2>/dev/null || true
            echo -e "${GREEN}✓ Docker containers stopped${NC}"
        else
            echo -e "${GREEN}none found${NC}"
        fi
    else
        echo "Docker not found, skipping..."
    fi
}

# Function to clean up Apple Container containers
cleanup_apple_container() {
    if command -v container &> /dev/null; then
        echo -n "Checking Apple Container containers... "
        # List all containers and filter by name
        CONTAINER_LIST=$(container list --format json 2>/dev/null || echo "[]")
        if [ "$CONTAINER_LIST" != "[]" ] && [ -n "$CONTAINER_LIST" ]; then
            # Extract container IDs that match our prefix
            CONTAINER_IDS=$(echo "$CONTAINER_LIST" | python3 -c "
import json
import sys
try:
    containers = json.load(sys.stdin)
    if isinstance(containers, list):
        for c in containers:
            if isinstance(c, dict):
                # Apple Container uses 'id' (nested under 'configuration') as the
                # container name. Guard the nested lookup: an entry without a
                # 'configuration' dict must not abort the whole listing.
                cid = (c.get('configuration') or {}).get('id', '')
                if '${PREFIX}' in cid:
                    print(cid)
except Exception:
    # Malformed JSON or unexpected structure: treat as 'no containers found'.
    pass
" 2>/dev/null || echo "")
            if [ -n "$CONTAINER_IDS" ]; then
                echo ""
                echo "Found Apple Container containers to clean up:"
                echo "$CONTAINER_IDS" | while read -r cid; do
                    echo " - $cid"
                done
                echo "Stopping Apple Container containers..."
                echo "$CONTAINER_IDS" | while read -r cid; do
                    container stop "$cid" 2>/dev/null || true
                done
                echo -e "${GREEN}✓ Apple Container containers stopped${NC}"
            else
                echo -e "${GREEN}none found${NC}"
            fi
        else
            echo -e "${GREEN}none found${NC}"
        fi
    else
        echo "Apple Container not found, skipping..."
    fi
}

# Clean up both runtimes
cleanup_docker
cleanup_apple_container

echo -e "${GREEN}✓ Container cleanup complete${NC}"

View File

@@ -0,0 +1,155 @@
#!/usr/bin/env bash
#
# config-upgrade.sh - Upgrade config.yaml to match config.example.yaml
#
# 1. Runs version-specific migrations (value replacements, renames, etc.)
# 2. Merges missing fields from the example into the user config
# 3. Backs up config.yaml to config.yaml.bak before modifying.
set -e

REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
EXAMPLE="$REPO_ROOT/config.example.yaml"

# Resolve config.yaml location: env var > backend/ > repo root
if [ -n "$DEER_FLOW_CONFIG_PATH" ] && [ -f "$DEER_FLOW_CONFIG_PATH" ]; then
    CONFIG="$DEER_FLOW_CONFIG_PATH"
elif [ -f "$REPO_ROOT/backend/config.yaml" ]; then
    CONFIG="$REPO_ROOT/backend/config.yaml"
elif [ -f "$REPO_ROOT/config.yaml" ]; then
    CONFIG="$REPO_ROOT/config.yaml"
else
    CONFIG=""
fi

# The example is the canonical template; without it there is nothing to
# migrate toward, so bail out early.
if [ ! -f "$EXAMPLE" ]; then
    echo "✗ config.example.yaml not found at $EXAMPLE"
    exit 1
fi

# No user config at all: seed one from the example and stop — no migration
# or merge is needed for a fresh copy.
if [ -z "$CONFIG" ]; then
    echo "No config.yaml found — creating from example..."
    cp "$EXAMPLE" "$REPO_ROOT/config.yaml"
    echo "OK config.yaml created. Please review and set your API keys."
    exit 0
fi

# Use inline Python to do migrations + recursive merge with PyYAML
# On Git-Bash/Cygwin a Windows Python cannot open POSIX-style paths, so
# translate them with cygpath first; elsewhere the paths pass through as-is.
if command -v cygpath >/dev/null 2>&1; then
    CONFIG_WIN="$(cygpath -w "$CONFIG")"
    EXAMPLE_WIN="$(cygpath -w "$EXAMPLE")"
else
    CONFIG_WIN="$CONFIG"
    EXAMPLE_WIN="$EXAMPLE"
fi

# Paths are handed to Python through the environment (not argv) so quoting
# survives spaces in paths. `cd backend` first so `uv run` picks up the
# backend project environment (where PyYAML is installed).
# NOTE(review): yaml.dump below rewrites config.yaml from the parsed dict,
# which drops any comments the user added — they survive only in the .bak
# backup. Confirm this is acceptable.
cd "$REPO_ROOT/backend" && CONFIG_WIN_PATH="$CONFIG_WIN" EXAMPLE_WIN_PATH="$EXAMPLE_WIN" uv run python -c "
import os
import sys, shutil, copy, re
from pathlib import Path
import yaml
config_path = Path(os.environ['CONFIG_WIN_PATH'])
example_path = Path(os.environ['EXAMPLE_WIN_PATH'])
with open(config_path, encoding='utf-8') as f:
    raw_text = f.read()
user = yaml.safe_load(raw_text) or {}
with open(example_path, encoding='utf-8') as f:
    example = yaml.safe_load(f) or {}
user_version = user.get('config_version', 0)
example_version = example.get('config_version', 0)
if user_version >= example_version:
    print(f'OK config.yaml is already up to date (version {user_version}).')
    sys.exit(0)
print(f'Upgrading config.yaml: version {user_version} -> {example_version}')
print()
# ── Migrations ───────────────────────────────────────────────────────────
# Each migration targets a specific version upgrade.
# 'replacements': list of (old_string, new_string) applied to the raw YAML text.
# This handles value changes that a dict merge cannot catch.
MIGRATIONS = {
    1: {
        'description': 'Rename src.* module paths to deerflow.*',
        'replacements': [
            ('src.community.', 'deerflow.community.'),
            ('src.sandbox.', 'deerflow.sandbox.'),
            ('src.models.', 'deerflow.models.'),
            ('src.tools.', 'deerflow.tools.'),
        ],
    },
    # Future migrations go here:
    # 2: {
    #     'description': '...',
    #     'replacements': [('old', 'new')],
    # },
}
# Apply migrations in order for versions (user_version, example_version]
migrated = []
for version in range(user_version + 1, example_version + 1):
    migration = MIGRATIONS.get(version)
    if not migration:
        continue
    desc = migration.get('description', f'Migration to v{version}')
    for old, new in migration.get('replacements', []):
        if old in raw_text:
            raw_text = raw_text.replace(old, new)
            migrated.append(f'{old} -> {new}')
# Re-parse after text migrations
user = yaml.safe_load(raw_text) or {}
if migrated:
    print(f'Applied {len(migrated)} migration(s):')
    for m in migrated:
        print(f' ~ {m}')
    print()
# ── Merge missing fields ─────────────────────────────────────────────────
added = []
def merge(target, source, path=''):
    \"\"\"Recursively merge source into target, adding missing keys only.\"\"\"
    for key, value in source.items():
        key_path = f'{path}.{key}' if path else key
        if key not in target:
            target[key] = copy.deepcopy(value)
            added.append(key_path)
        elif isinstance(value, dict) and isinstance(target[key], dict):
            merge(target[key], value, key_path)
merge(user, example)
# Always update config_version
user['config_version'] = example_version
# ── Write ─────────────────────────────────────────────────────────────────
backup = config_path.with_suffix('.yaml.bak')
shutil.copy2(config_path, backup)
print(f'Backed up to {backup.name}')
with open(config_path, 'w', encoding='utf-8') as f:
    yaml.dump(user, f, default_flow_style=False, allow_unicode=True, sort_keys=False)
if added:
    print(f'Added {len(added)} new field(s):')
    for a in added:
        print(f' + {a}')
if not migrated and not added:
    print('No changes needed (version bumped only).')
print()
print(f'OK config.yaml upgraded to version {example_version}.')
print(' Please review the changes and set any new required values.')
"

View File

@@ -0,0 +1,58 @@
#!/usr/bin/env python3
"""Cross-platform config bootstrap script for DeerFlow."""
from __future__ import annotations
import shutil
import sys
from pathlib import Path
def copy_if_missing(src: Path, dst: Path) -> None:
    """Copy *src* to *dst* unless *dst* already exists.

    Creates parent directories as needed; raises FileNotFoundError when
    the template *src* is absent.
    """
    if not dst.exists():
        if not src.exists():
            raise FileNotFoundError(f"Missing template file: {src}")
        dst.parent.mkdir(parents=True, exist_ok=True)
        shutil.copyfile(src, dst)
def main() -> int:
    """Seed config.yaml and .env files from their templates.

    Refuses to run when any configuration file already exists; returns 0 on
    success and 1 on any error (with a hint for permission problems).
    """
    project_root = Path(__file__).resolve().parent.parent
    # Abort when any known config variant is present — never overwrite.
    conflicting = ("config.yaml", "config.yml", "configure.yml")
    if any((project_root / name).exists() for name in conflicting):
        print(
            "Error: configuration file already exists "
            "(config.yaml/config.yml/configure.yml). Aborting."
        )
        return 1
    template_pairs = (
        (project_root / "config.example.yaml", project_root / "config.yaml"),
        (project_root / ".env.example", project_root / ".env"),
        (
            project_root / "frontend" / ".env.example",
            project_root / "frontend" / ".env",
        ),
    )
    try:
        for src, dst in template_pairs:
            copy_if_missing(src, dst)
    except (FileNotFoundError, OSError) as exc:
        print("Error while generating configuration files:")
        print(f" {exc}")
        if isinstance(exc, PermissionError):
            print(
                "Hint: Check file permissions and ensure the files are not "
                "read-only or locked by another process."
            )
        return 1
    print("✓ Configuration files generated")
    return 0
# Script entry point: 0 on success, 1 when a config already exists or a template is missing.
if __name__ == "__main__":
    sys.exit(main())

300
deer-flow/scripts/deploy.sh Executable file
View File

@@ -0,0 +1,300 @@
#!/usr/bin/env bash
#
# deploy.sh - Build, start, or stop DeerFlow production services
#
# Commands:
#   deploy.sh [--MODE]        — build + start (default: --standard)
#   deploy.sh build           — build all images (mode-agnostic)
#   deploy.sh start [--MODE]  — start from pre-built images (default: --standard)
#   deploy.sh down            — stop and remove containers
#
# Runtime modes:
#   --standard (default)  All services including LangGraph server.
#   --gateway             No LangGraph container; nginx routes /api/langgraph/*
#                         to the Gateway compat API instead.
#
# Sandbox mode (local / aio / provisioner) is auto-detected from config.yaml.
#
# Examples:
#   deploy.sh                  # build + start in standard mode
#   deploy.sh --gateway        # build + start in gateway mode
#   deploy.sh build            # build all images
#   deploy.sh start --gateway  # start pre-built images in gateway mode
#   deploy.sh down             # stop and remove containers
#
# Must be run from the repo root directory.
set -e

# ── Argument parsing ──────────────────────────────────────────────────────────
# Accepts either a command (build/start/down) optionally followed by a mode
# flag, or a bare mode flag (which implies the default build+start path).
RUNTIME_MODE="standard"
case "${1:-}" in
build|start|down)
    CMD="$1"
    if [ -n "${2:-}" ]; then
        case "$2" in
        --standard) RUNTIME_MODE="standard" ;;
        --gateway) RUNTIME_MODE="gateway" ;;
        *) echo "Unknown mode: $2"; echo "Usage: deploy.sh [build|start|down] [--standard|--gateway]"; exit 1 ;;
        esac
    fi
    ;;
--standard|--gateway)
    CMD=""
    RUNTIME_MODE="${1#--}"
    ;;
"")
    CMD=""
    ;;
*)
    echo "Unknown argument: $1"
    echo "Usage: deploy.sh [build|start|down] [--standard|--gateway]"
    exit 1
    ;;
esac

REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "$REPO_ROOT"
DOCKER_DIR="$REPO_ROOT/docker"
# Compose invocation kept as an array so the quoted -f path survives expansion.
COMPOSE_CMD=(docker compose -p deer-flow -f "$DOCKER_DIR/docker-compose.yaml")

# ── Colors ────────────────────────────────────────────────────────────────────
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'

# ── DEER_FLOW_HOME ────────────────────────────────────────────────────────────
if [ -z "$DEER_FLOW_HOME" ]; then
    export DEER_FLOW_HOME="$REPO_ROOT/backend/.deer-flow"
fi
echo -e "${BLUE}DEER_FLOW_HOME=$DEER_FLOW_HOME${NC}"
mkdir -p "$DEER_FLOW_HOME"

# ── DEER_FLOW_REPO_ROOT (for skills host path in DooD) ───────────────────────
export DEER_FLOW_REPO_ROOT="$REPO_ROOT"

# ── config.yaml ───────────────────────────────────────────────────────────────
# NOTE(review): this bootstrap (config seeding, secret generation below) runs
# before the command dispatch, so it also executes for `deploy.sh down` — a
# plain `down` can seed config.yaml or exit 1 when no template exists. Confirm
# that is intended; the `down` branch re-exports defaults as if it could run
# without this section.
if [ -z "$DEER_FLOW_CONFIG_PATH" ]; then
    export DEER_FLOW_CONFIG_PATH="$REPO_ROOT/config.yaml"
fi
if [ ! -f "$DEER_FLOW_CONFIG_PATH" ]; then
    # Try to seed from repo (config.example.yaml is the canonical template)
    if [ -f "$REPO_ROOT/config.example.yaml" ]; then
        cp "$REPO_ROOT/config.example.yaml" "$DEER_FLOW_CONFIG_PATH"
        echo -e "${GREEN}✓ Seeded config.example.yaml → $DEER_FLOW_CONFIG_PATH${NC}"
        echo -e "${YELLOW}⚠ config.yaml was seeded from the example template.${NC}"
        echo " Run 'make setup' to generate a minimal config, or edit $DEER_FLOW_CONFIG_PATH manually before use."
    else
        echo -e "${RED}✗ No config.yaml found.${NC}"
        echo " Run 'make setup' from the repo root (recommended),"
        echo " or 'make config' for the full template, then set the required model API keys."
        exit 1
    fi
else
    echo -e "${GREEN}✓ config.yaml: $DEER_FLOW_CONFIG_PATH${NC}"
fi

# ── extensions_config.json ───────────────────────────────────────────────────
if [ -z "$DEER_FLOW_EXTENSIONS_CONFIG_PATH" ]; then
    export DEER_FLOW_EXTENSIONS_CONFIG_PATH="$REPO_ROOT/extensions_config.json"
fi
if [ ! -f "$DEER_FLOW_EXTENSIONS_CONFIG_PATH" ]; then
    if [ -f "$REPO_ROOT/extensions_config.json" ]; then
        cp "$REPO_ROOT/extensions_config.json" "$DEER_FLOW_EXTENSIONS_CONFIG_PATH"
        echo -e "${GREEN}✓ Seeded extensions_config.json → $DEER_FLOW_EXTENSIONS_CONFIG_PATH${NC}"
    else
        # Create a minimal empty config so the gateway doesn't fail on startup
        echo '{"mcpServers":{},"skills":{}}' > "$DEER_FLOW_EXTENSIONS_CONFIG_PATH"
        echo -e "${YELLOW}⚠ extensions_config.json not found, created empty config at $DEER_FLOW_EXTENSIONS_CONFIG_PATH${NC}"
    fi
else
    echo -e "${GREEN}✓ extensions_config.json: $DEER_FLOW_EXTENSIONS_CONFIG_PATH${NC}"
fi

# ── BETTER_AUTH_SECRET ───────────────────────────────────────────────────────
# Required by Next.js in production. Generated once and persisted so auth
# sessions survive container restarts.
_secret_file="$DEER_FLOW_HOME/.better-auth-secret"
if [ -z "$BETTER_AUTH_SECRET" ]; then
    if [ -f "$_secret_file" ]; then
        # export and assignment split on purpose: `export VAR=$(cmd)` would
        # mask a failing command substitution (shellcheck SC2155).
        export BETTER_AUTH_SECRET
        BETTER_AUTH_SECRET="$(cat "$_secret_file")"
        echo -e "${GREEN}✓ BETTER_AUTH_SECRET loaded from $_secret_file${NC}"
    else
        export BETTER_AUTH_SECRET
        BETTER_AUTH_SECRET="$(python3 -c 'import secrets; print(secrets.token_hex(32))')"
        echo "$BETTER_AUTH_SECRET" > "$_secret_file"
        # Secret persisted on disk — owner-only permissions.
        chmod 600 "$_secret_file"
        echo -e "${GREEN}✓ BETTER_AUTH_SECRET generated → $_secret_file${NC}"
    fi
fi

# ── detect_sandbox_mode ───────────────────────────────────────────────────────
# Reads the top-level `sandbox:` section of config.yaml with awk (no YAML
# parser available at this stage) and prints one of: local | aio | provisioner.
detect_sandbox_mode() {
    local sandbox_use=""
    local provisioner_url=""
    [ -f "$DEER_FLOW_CONFIG_PATH" ] || { echo "local"; return; }
    # in_sandbox turns on at the `sandbox:` header and off at the next
    # non-indented, non-comment line; the first `use:` inside wins.
    sandbox_use=$(awk '
        /^[[:space:]]*sandbox:[[:space:]]*$/ { in_sandbox=1; next }
        in_sandbox && /^[^[:space:]#]/ { in_sandbox=0 }
        in_sandbox && /^[[:space:]]*use:[[:space:]]*/ {
            line=$0; sub(/^[[:space:]]*use:[[:space:]]*/, "", line); print line; exit
        }
    ' "$DEER_FLOW_CONFIG_PATH")
    provisioner_url=$(awk '
        /^[[:space:]]*sandbox:[[:space:]]*$/ { in_sandbox=1; next }
        in_sandbox && /^[^[:space:]#]/ { in_sandbox=0 }
        in_sandbox && /^[[:space:]]*provisioner_url:[[:space:]]*/ {
            line=$0; sub(/^[[:space:]]*provisioner_url:[[:space:]]*/, "", line); print line; exit
        }
    ' "$DEER_FLOW_CONFIG_PATH")
    # AioSandboxProvider + provisioner_url ⇒ Kubernetes-provisioned sandboxes.
    if [[ "$sandbox_use" == *"deerflow.community.aio_sandbox:AioSandboxProvider"* ]]; then
        if [ -n "$provisioner_url" ]; then
            echo "provisioner"
        else
            echo "aio"
        fi
    else
        echo "local"
    fi
}

# ── down ──────────────────────────────────────────────────────────────────────
if [ "$CMD" = "down" ]; then
    # Set minimal env var defaults so docker compose can parse the file without
    # warning about unset variables that appear in volume specs.
    export DEER_FLOW_HOME="${DEER_FLOW_HOME:-$REPO_ROOT/backend/.deer-flow}"
    export DEER_FLOW_CONFIG_PATH="${DEER_FLOW_CONFIG_PATH:-$DEER_FLOW_HOME/config.yaml}"
    export DEER_FLOW_EXTENSIONS_CONFIG_PATH="${DEER_FLOW_EXTENSIONS_CONFIG_PATH:-$DEER_FLOW_HOME/extensions_config.json}"
    export DEER_FLOW_DOCKER_SOCKET="${DEER_FLOW_DOCKER_SOCKET:-/var/run/docker.sock}"
    export DEER_FLOW_REPO_ROOT="${DEER_FLOW_REPO_ROOT:-$REPO_ROOT}"
    export BETTER_AUTH_SECRET="${BETTER_AUTH_SECRET:-placeholder}"
    "${COMPOSE_CMD[@]}" down
    exit 0
fi

# ── build ────────────────────────────────────────────────────────────────────
# Build produces mode-agnostic images. No --gateway or sandbox detection needed.
if [ "$CMD" = "build" ]; then
    echo "=========================================="
    echo " DeerFlow — Building Images"
    echo "=========================================="
    echo ""
    # Docker socket is needed for compose to parse volume specs
    if [ -z "$DEER_FLOW_DOCKER_SOCKET" ]; then
        export DEER_FLOW_DOCKER_SOCKET="/var/run/docker.sock"
    fi
    "${COMPOSE_CMD[@]}" build
    echo ""
    echo "=========================================="
    echo " ✓ Images built successfully"
    echo "=========================================="
    echo ""
    echo " Next: deploy.sh start [--gateway]"
    echo ""
    exit 0
fi

# ── Banner ────────────────────────────────────────────────────────────────────
echo "=========================================="
echo " DeerFlow Production Deployment"
echo "=========================================="
echo ""

# ── Detect runtime configuration ────────────────────────────────────────────
# Only needed for start / up — determines which containers to launch.
sandbox_mode="$(detect_sandbox_mode)"
echo -e "${BLUE}Sandbox mode: $sandbox_mode${NC}"
echo -e "${BLUE}Runtime mode: $RUNTIME_MODE${NC}"
case "$RUNTIME_MODE" in
gateway)
    # nginx config is templated via env vars: point the langgraph upstream at
    # the gateway's compat API instead of a dedicated LangGraph container.
    export LANGGRAPH_UPSTREAM=gateway:8001
    export LANGGRAPH_REWRITE=/api/
    services="frontend gateway nginx"
    ;;
standard)
    services="frontend gateway langgraph nginx"
    ;;
esac
if [ "$sandbox_mode" = "provisioner" ]; then
    services="$services provisioner"
fi

# ── DEER_FLOW_DOCKER_SOCKET ───────────────────────────────────────────────────
if [ -z "$DEER_FLOW_DOCKER_SOCKET" ]; then
    export DEER_FLOW_DOCKER_SOCKET="/var/run/docker.sock"
fi
# Non-local sandboxes run Docker-out-of-Docker and need a live socket.
if [ "$sandbox_mode" != "local" ]; then
    if [ ! -S "$DEER_FLOW_DOCKER_SOCKET" ]; then
        echo -e "${RED}⚠ Docker socket not found at $DEER_FLOW_DOCKER_SOCKET${NC}"
        echo " AioSandboxProvider (DooD) will not work."
        exit 1
    else
        echo -e "${GREEN}✓ Docker socket: $DEER_FLOW_DOCKER_SOCKET${NC}"
    fi
fi
echo ""

# ── Start / Up ───────────────────────────────────────────────────────────────
# $services is intentionally unquoted: it is a space-separated list that must
# word-split into individual compose service names.
if [ "$CMD" = "start" ]; then
    echo "Starting containers (no rebuild)..."
    echo ""
    # shellcheck disable=SC2086
    "${COMPOSE_CMD[@]}" up -d --remove-orphans $services
else
    # Default: build + start
    echo "Building images and starting containers..."
    echo ""
    # shellcheck disable=SC2086
    "${COMPOSE_CMD[@]}" up --build -d --remove-orphans $services
fi

echo ""
echo "=========================================="
echo " DeerFlow is running! ($RUNTIME_MODE mode)"
echo "=========================================="
echo ""
echo " 🌐 Application: http://localhost:${PORT:-2026}"
echo " 📡 API Gateway: http://localhost:${PORT:-2026}/api/*"
if [ "$RUNTIME_MODE" = "gateway" ]; then
    echo " 🤖 Runtime: Gateway embedded"
    echo " API: /api/langgraph/* → Gateway (compat)"
else
    echo " 🤖 LangGraph: http://localhost:${PORT:-2026}/api/langgraph/*"
fi
echo ""
echo " Manage:"
echo " make down — stop and remove containers"
echo " make docker-logs — view logs"
echo ""

380
deer-flow/scripts/docker.sh Executable file
View File

@@ -0,0 +1,380 @@
#!/usr/bin/env bash
# docker.sh — manage the DeerFlow Docker *development* environment
# (project name deer-flow-dev, compose file docker-compose-dev.yaml).
set -e

# Colors for output
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Get script directory
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
DOCKER_DIR="$PROJECT_ROOT/docker"

# Docker Compose command with project name.
# NOTE: kept as a plain string (word-split where used) and the -f path is
# relative, so callers must `cd "$DOCKER_DIR"` before expanding it.
COMPOSE_CMD="docker compose -p deer-flow-dev -f docker-compose-dev.yaml"
# Decide which sandbox services to run by scanning the `sandbox:` section of
# config.yaml with awk (no YAML parser available here).
# Prints exactly one of: local | aio | provisioner.
detect_sandbox_mode() {
    local config_file="$PROJECT_ROOT/config.yaml"
    local sandbox_use=""
    local provisioner_url=""
    # Missing config ⇒ safest default: local sandbox.
    if [ ! -f "$config_file" ]; then
        echo "local"
        return
    fi
    # in_sandbox turns on at the top-level `sandbox:` header and off at the
    # next non-indented, non-comment line; first matching key inside wins.
    sandbox_use=$(awk '
        /^[[:space:]]*sandbox:[[:space:]]*$/ { in_sandbox=1; next }
        in_sandbox && /^[^[:space:]#]/ { in_sandbox=0 }
        in_sandbox && /^[[:space:]]*use:[[:space:]]*/ {
            line=$0
            sub(/^[[:space:]]*use:[[:space:]]*/, "", line)
            print line
            exit
        }
    ' "$config_file")
    provisioner_url=$(awk '
        /^[[:space:]]*sandbox:[[:space:]]*$/ { in_sandbox=1; next }
        in_sandbox && /^[^[:space:]#]/ { in_sandbox=0 }
        in_sandbox && /^[[:space:]]*provisioner_url:[[:space:]]*/ {
            line=$0
            sub(/^[[:space:]]*provisioner_url:[[:space:]]*/, "", line)
            print line
            exit
        }
    ' "$config_file")
    # AioSandboxProvider with a provisioner URL ⇒ Kubernetes-provisioned
    # sandboxes; without one ⇒ plain AIO containers; anything else ⇒ local.
    if [[ "$sandbox_use" == *"deerflow.sandbox.local:LocalSandboxProvider"* ]]; then
        echo "local"
    elif [[ "$sandbox_use" == *"deerflow.community.aio_sandbox:AioSandboxProvider"* ]]; then
        if [ -n "$provisioner_url" ]; then
            echo "provisioner"
        else
            echo "aio"
        fi
    else
        echo "local"
    fi
}
# Cleanup function for Ctrl+C
cleanup() {
    echo ""
    echo -e "${YELLOW}Operation interrupted by user${NC}"
    # 130 = 128 + SIGINT, the conventional exit status after Ctrl+C.
    exit 130
}

# Set up trap for Ctrl+C (and TERM, so kill behaves the same way)
trap cleanup INT TERM
docker_available() {
    # Succeed (0) only when the docker CLI is installed AND the daemon answers;
    # either probe failing short-circuits to the conventional failure status 1.
    command -v docker >/dev/null 2>&1 || return 1
    docker info >/dev/null 2>&1 || return 1
    return 0
}
# Initialize: pre-pull the sandbox image so first Pod startup is fast.
# Non-fatal by design — local sandbox mode needs no image at all.
init() {
    echo "=========================================="
    echo " DeerFlow Init — Pull Sandbox Image"
    echo "=========================================="
    echo ""
    SANDBOX_IMAGE="enterprise-public-cn-beijing.cr.volces.com/vefaas-public/all-in-one-sandbox:latest"
    # Detect sandbox mode from config.yaml
    local sandbox_mode
    sandbox_mode="$(detect_sandbox_mode)"
    # Skip image pull for local sandbox mode (no container image needed)
    if [ "$sandbox_mode" = "local" ]; then
        echo -e "${GREEN}Detected local sandbox mode — no Docker image required.${NC}"
        echo ""
        if docker_available; then
            echo -e "${GREEN}✓ Docker environment is ready.${NC}"
            echo ""
            echo -e "${YELLOW}Next step: make docker-start${NC}"
        else
            echo -e "${YELLOW}Docker does not appear to be installed, or the Docker daemon is not reachable.${NC}"
            echo "Local sandbox mode itself does not require Docker, but Docker-based workflows (e.g., docker-start) will fail until Docker is available."
            echo ""
            echo -e "${YELLOW}Install and start Docker, then run: make docker-init && make docker-start${NC}"
        fi
        return 0
    fi
    # Pull only when the exact repository:tag is not already present locally
    # (anchored grep so a partial match cannot skip the pull).
    if ! docker images --format '{{.Repository}}:{{.Tag}}' | grep -q "^${SANDBOX_IMAGE}$"; then
        echo -e "${BLUE}Pulling sandbox image: $SANDBOX_IMAGE ...${NC}"
        echo ""
        # A failed pull is deliberately non-fatal (returns 0 with guidance).
        if ! docker pull "$SANDBOX_IMAGE" 2>&1; then
            echo ""
            echo -e "${YELLOW}⚠ Failed to pull sandbox image.${NC}"
            echo ""
            echo "This is expected if:"
            echo " 1. You are using local sandbox mode (default — no image needed)"
            echo " 2. You are behind a corporate proxy or firewall"
            echo " 3. The registry requires authentication"
            echo ""
            echo -e "${GREEN}The Docker development environment can still be started.${NC}"
            echo "If you need AIO sandbox (container-based execution):"
            echo " - Ensure you have network access to the registry"
            echo " - Or configure a custom sandbox image in config.yaml"
            echo ""
            echo -e "${YELLOW}Next step: make docker-start${NC}"
            return 0
        fi
    else
        echo -e "${GREEN}Sandbox image already exists locally: $SANDBOX_IMAGE${NC}"
    fi
    echo ""
    echo -e "${GREEN}✓ Sandbox image is ready.${NC}"
    echo ""
    echo -e "${YELLOW}Next step: make docker-start${NC}"
}
# Start Docker development environment
# Usage: start [--gateway]
start() {
    # Build and start the Docker dev stack. The sandbox mode comes from
    # detect_sandbox_mode (defined earlier in this script); --gateway drops the
    # LangGraph container and serves the agent runtime from the Gateway API
    # instead (experimental).
    local sandbox_mode
    local services
    local gateway_mode=false
    # Check for --gateway flag
    for arg in "$@"; do
        if [ "$arg" = "--gateway" ]; then
            gateway_mode=true
        fi
    done
    echo "=========================================="
    echo " Starting DeerFlow Docker Development"
    echo "=========================================="
    echo ""
    sandbox_mode="$(detect_sandbox_mode)"
    # Pick the compose services for the chosen runtime; the provisioner is only
    # added when the sandbox runs in "provisioner" (Kubernetes) mode.
    if $gateway_mode; then
        services="frontend gateway nginx"
        if [ "$sandbox_mode" = "provisioner" ]; then
            services="frontend gateway provisioner nginx"
        fi
    else
        services="frontend gateway langgraph nginx"
        if [ "$sandbox_mode" = "provisioner" ]; then
            services="frontend gateway langgraph provisioner nginx"
        fi
    fi
    if $gateway_mode; then
        echo -e "${BLUE}Runtime: Gateway mode (experimental) — no LangGraph container${NC}"
    fi
    echo -e "${BLUE}Detected sandbox mode: $sandbox_mode${NC}"
    if [ "$sandbox_mode" = "provisioner" ]; then
        echo -e "${BLUE}Provisioner enabled (Kubernetes mode).${NC}"
    else
        echo -e "${BLUE}Provisioner disabled (not required for this sandbox mode).${NC}"
    fi
    echo ""
    # Set DEER_FLOW_ROOT for provisioner if not already set
    if [ -z "$DEER_FLOW_ROOT" ]; then
        export DEER_FLOW_ROOT="$PROJECT_ROOT"
        echo -e "${BLUE}Setting DEER_FLOW_ROOT=$DEER_FLOW_ROOT${NC}"
        echo ""
    fi
    # Ensure config.yaml exists before starting.
    if [ ! -f "$PROJECT_ROOT/config.yaml" ]; then
        if [ -f "$PROJECT_ROOT/config.example.yaml" ]; then
            cp "$PROJECT_ROOT/config.example.yaml" "$PROJECT_ROOT/config.yaml"
            echo ""
            echo -e "${YELLOW}============================================================${NC}"
            echo -e "${YELLOW} config.yaml has been created from config.example.yaml.${NC}"
            echo -e "${YELLOW} Please edit config.yaml to set your API keys and model ${NC}"
            echo -e "${YELLOW} configuration before starting DeerFlow. ${NC}"
            echo -e "${YELLOW}============================================================${NC}"
            echo ""
            echo -e "${YELLOW} Recommended: run 'make setup' before starting Docker. ${NC}"
            echo -e "${YELLOW} Edit the file: $PROJECT_ROOT/config.yaml${NC}"
            echo -e "${YELLOW} Then run: make docker-start${NC}"
            echo ""
            # Exit (not return): the user must edit the fresh config.yaml
            # before a real start makes sense.
            exit 0
        else
            echo -e "${YELLOW}✗ config.yaml not found and no config.example.yaml to copy from.${NC}"
            exit 1
        fi
    fi
    # Ensure extensions_config.json exists as a file before mounting.
    # Docker creates a directory when bind-mounting a non-existent host path.
    if [ ! -f "$PROJECT_ROOT/extensions_config.json" ]; then
        if [ -f "$PROJECT_ROOT/extensions_config.example.json" ]; then
            cp "$PROJECT_ROOT/extensions_config.example.json" "$PROJECT_ROOT/extensions_config.json"
            echo -e "${BLUE}Created extensions_config.json from example${NC}"
        else
            echo "{}" > "$PROJECT_ROOT/extensions_config.json"
            echo -e "${BLUE}Created empty extensions_config.json${NC}"
        fi
    fi
    # Set nginx routing for gateway mode (envsubst in nginx container)
    if $gateway_mode; then
        export LANGGRAPH_UPSTREAM=gateway:8001
        export LANGGRAPH_REWRITE=/api/
    fi
    echo "Building and starting containers..."
    # $services is intentionally unquoted: it is a space-separated list of
    # compose service names.
    cd "$DOCKER_DIR" && $COMPOSE_CMD up --build -d --remove-orphans $services
    echo ""
    echo "=========================================="
    echo " DeerFlow Docker is starting!"
    echo "=========================================="
    echo ""
    echo " 🌐 Application: http://localhost:2026"
    echo " 📡 API Gateway: http://localhost:2026/api/*"
    if $gateway_mode; then
        echo " 🤖 Runtime: Gateway embedded"
        echo " API: /api/langgraph/* → Gateway (compat)"
    else
        echo " 🤖 LangGraph: http://localhost:2026/api/langgraph/*"
    fi
    echo ""
    echo " 📋 View logs: make docker-logs"
    echo " 🛑 Stop: make docker-stop"
    echo ""
}
# View Docker development logs
logs() {
    # Follow compose logs, optionally narrowed to a single service.
    local service="$1"
    case "$service" in
        --frontend|--gateway|--nginx|--provisioner)
            # Map the CLI flag to the compose service name by stripping "--".
            service="${service#--}"
            echo -e "${BLUE}Viewing ${service} logs...${NC}"
            ;;
        "")
            echo -e "${BLUE}Viewing all logs...${NC}"
            ;;
        *)
            echo -e "${YELLOW}Unknown option: $1${NC}"
            echo "Usage: $0 logs [--frontend|--gateway|--nginx|--provisioner]"
            exit 1
            ;;
    esac
    # $service is intentionally unquoted: empty means "all services".
    cd "$DOCKER_DIR" && $COMPOSE_CMD logs -f $service
}
# Stop Docker development environment
stop() {
    # docker-compose-dev.yaml interpolates DEER_FLOW_ROOT; default it here so
    # `compose down` runs without "variable is not set" warnings.
    [ -n "$DEER_FLOW_ROOT" ] || export DEER_FLOW_ROOT="$PROJECT_ROOT"
    echo "Stopping Docker development services..."
    cd "$DOCKER_DIR" && $COMPOSE_CMD down
    echo "Cleaning up sandbox containers..."
    # Best-effort: missing script or no leftover containers is not an error.
    "$SCRIPT_DIR/cleanup-containers.sh" deer-flow-sandbox 2>/dev/null || true
    echo -e "${GREEN}✓ Docker services stopped${NC}"
}
# Restart Docker development environment
restart() {
    # Restart the already-running containers (compose restart does not rebuild).
    printf '%s\n' \
        "========================================" \
        " Restarting DeerFlow Docker Services" \
        "========================================" \
        ""
    echo -e "${BLUE}Restarting containers...${NC}"
    cd "$DOCKER_DIR" && $COMPOSE_CMD restart
    echo ""
    echo -e "${GREEN}✓ Docker services restarted${NC}"
    echo ""
    echo " 🌐 Application: http://localhost:2026"
    echo " 📋 View logs: make docker-logs"
    echo ""
}
# Show help
help() {
    # Usage summary for every subcommand this script understands.
    cat <<EOF
DeerFlow Docker Management Script

Usage: $0 <command> [options]

Commands:
 init - Pull the sandbox image (speeds up first Pod startup)
 start - Start Docker services (auto-detects sandbox mode from config.yaml)
 start --gateway - Start without LangGraph container (Gateway mode, experimental)
 restart - Restart all running Docker services
 logs [option] - View Docker development logs
 --frontend View frontend logs only
 --gateway View gateway logs only
 --nginx View nginx logs only
 --provisioner View provisioner logs only
 stop - Stop Docker development services
 help - Show this help message

EOF
}
main() {
    # Dispatch the first CLI argument to its handler; no argument shows help.
    local command="$1"
    case "$command" in
        init) init ;;
        start) shift; start "$@" ;;
        restart) restart ;;
        logs) logs "$2" ;;
        stop) stop ;;
        help|--help|-h|"") help ;;
        *)
            echo -e "${YELLOW}Unknown command: $1${NC}"
            echo ""
            help
            exit 1
            ;;
    esac
}
# Dispatch only when executed directly; sourcing the script just defines the
# functions (useful for reuse and testing).
if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
    main "$@"
fi

721
deer-flow/scripts/doctor.py Normal file
View File

@@ -0,0 +1,721 @@
#!/usr/bin/env python3
"""DeerFlow Health Check (make doctor).
Checks system requirements, configuration, LLM provider, and optional
components, then prints an actionable report.
Exit codes:
0 — all required checks passed (warnings allowed)
1 — one or more required checks failed
"""
from __future__ import annotations
import os
import shutil
import subprocess
import sys
from importlib import import_module
from pathlib import Path
from typing import Literal
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
Status = Literal["ok", "warn", "fail", "skip"]
def _supports_color() -> bool:
return hasattr(sys.stdout, "isatty") and sys.stdout.isatty()
def _c(text: str, code: str) -> str:
if _supports_color():
return f"\033[{code}m{text}\033[0m"
return text
def green(t: str) -> str:
return _c(t, "32")
def red(t: str) -> str:
return _c(t, "31")
def yellow(t: str) -> str:
return _c(t, "33")
def cyan(t: str) -> str:
return _c(t, "36")
def bold(t: str) -> str:
return _c(t, "1")
def _icon(status: Status) -> str:
icons = {"ok": green(""), "warn": yellow("!"), "fail": red(""), "skip": ""}
return icons[status]
def _run(cmd: list[str]) -> str | None:
try:
r = subprocess.run(cmd, capture_output=True, text=True, check=True)
return (r.stdout or r.stderr).strip()
except Exception:
return None
def _parse_major(version_text: str) -> int | None:
v = version_text.lstrip("v").split(".", 1)[0]
return int(v) if v.isdigit() else None
def _load_yaml_file(path: Path) -> dict:
    """Parse *path* as YAML and require a mapping at the top level."""
    import yaml

    with open(path, encoding="utf-8") as handle:
        parsed = yaml.safe_load(handle) or {}
    if not isinstance(parsed, dict):
        raise ValueError("top-level config must be a YAML mapping")
    return parsed
def _load_app_config(config_path: Path) -> object:
    """Load the config through DeerFlow's own AppConfig schema.

    Import is deferred so the doctor script still runs when the backend
    package (or its dependencies) is not importable.
    """
    from deerflow.config.app_config import AppConfig
    return AppConfig.from_file(str(config_path))
def _split_use_path(use: str) -> tuple[str, str] | None:
if ":" not in use:
return None
module_name, attr_name = use.split(":", 1)
if not module_name or not attr_name:
return None
return module_name, attr_name
# ---------------------------------------------------------------------------
# Check result container
# ---------------------------------------------------------------------------
class CheckResult:
    """Outcome of one health check: a label, a status, and optional detail/fix text."""

    def __init__(
        self,
        label: str,
        status: Status,
        detail: str = "",
        fix: str | None = None,
    ) -> None:
        # label: human-readable check name; status: ok/warn/fail/skip;
        # detail: short parenthesised context; fix: multi-line remediation hint.
        self.label = label
        self.status = status
        self.detail = detail
        self.fix = fix

    def print(self) -> None:
        """Render one report row, plus an indented line per fix suggestion."""
        suffix = f" ({self.detail})" if self.detail else ""
        print(f" {_icon(self.status)} {self.label}{suffix}")
        if self.fix:
            for line in self.fix.splitlines():
                print(f" {cyan('')} {line}")
# ---------------------------------------------------------------------------
# Individual checks
# ---------------------------------------------------------------------------
def check_python() -> CheckResult:
    """Require Python 3.12 or newer (hard requirement)."""
    info = sys.version_info
    version_str = f"{info.major}.{info.minor}.{info.micro}"
    if info < (3, 12):
        return CheckResult(
            "Python",
            "fail",
            version_str,
            fix="Python 3.12+ required. Install from https://www.python.org/",
        )
    return CheckResult("Python", "ok", version_str)
def check_node() -> CheckResult:
    """Require Node.js 22+ on PATH (version read from `node -v`)."""
    if shutil.which("node") is None:
        return CheckResult(
            "Node.js",
            "fail",
            fix="Install Node.js 22+: https://nodejs.org/",
        )
    reported = _run(["node", "-v"]) or ""
    major = _parse_major(reported)
    if major is not None and major >= 22:
        return CheckResult("Node.js", "ok", reported.lstrip("v"))
    return CheckResult(
        "Node.js",
        "fail",
        reported or "unknown version",
        fix="Node.js 22+ required. Install from https://nodejs.org/",
    )
def check_pnpm() -> CheckResult:
    """Detect pnpm directly (including Windows .cmd shim) or via corepack."""
    launchers = [["pnpm"], ["pnpm.cmd"]]
    if shutil.which("corepack"):
        launchers.append(["corepack", "pnpm"])
    for launcher in launchers:
        if shutil.which(launcher[0]):
            version = _run([*launcher, "-v"]) or ""
            return CheckResult("pnpm", "ok", version)
    return CheckResult(
        "pnpm",
        "fail",
        fix="npm install -g pnpm (or: corepack enable)",
    )
def check_uv() -> CheckResult:
    """Detect the uv package manager and report its version number."""
    if shutil.which("uv") is None:
        return CheckResult(
            "uv",
            "fail",
            fix="curl -LsSf https://astral.sh/uv/install.sh | sh",
        )
    raw = _run(["uv", "--version"]) or ""
    tokens = raw.split()
    # `uv --version` prints "uv X.Y.Z"; keep just the number when present.
    return CheckResult("uv", "ok", tokens[1] if len(tokens) > 1 else raw)
def check_nginx() -> CheckResult:
    """Detect nginx, needed as the local dev reverse proxy."""
    if not shutil.which("nginx"):
        return CheckResult(
            "nginx",
            "fail",
            fix=(
                "macOS: brew install nginx\n"
                "Ubuntu: sudo apt install nginx\n"
                "Windows: use WSL or Docker mode"
            ),
        )
    raw = _run(["nginx", "-v"]) or ""
    # Output looks like "nginx version: nginx/1.25.x" — keep the part after "/".
    version = raw.split("/", 1)[-1] if "/" in raw else raw
    return CheckResult("nginx", "ok", version)
def check_config_exists(config_path: Path) -> CheckResult:
    """Require config.yaml to exist before any deeper configuration checks."""
    if not config_path.exists():
        return CheckResult(
            "config.yaml found",
            "fail",
            fix="Run 'make setup' to create it",
        )
    return CheckResult("config.yaml found", "ok")
def check_config_version(config_path: Path, project_root: Path) -> CheckResult:
    """Compare the user's config_version against config.example.yaml.

    Only warns (never fails) when the user's config is older than the example;
    a missing or unparseable example degrades to "skip".
    """
    if not config_path.exists():
        return CheckResult("config.yaml version", "skip")
    try:
        import yaml
        with open(config_path, encoding="utf-8") as f:
            user_data = yaml.safe_load(f) or {}
        user_ver = int(user_data.get("config_version", 0))
    except Exception as exc:
        return CheckResult("config.yaml version", "fail", str(exc))
    example_path = project_root / "config.example.yaml"
    if not example_path.exists():
        # Without the shipped example there is no reference version to compare.
        return CheckResult("config.yaml version", "skip", "config.example.yaml not found")
    try:
        import yaml
        with open(example_path, encoding="utf-8") as f:
            example_data = yaml.safe_load(f) or {}
        example_ver = int(example_data.get("config_version", 0))
    except Exception:
        return CheckResult("config.yaml version", "skip")
    if user_ver < example_ver:
        return CheckResult(
            "config.yaml version",
            "warn",
            f"v{user_ver} < v{example_ver} (latest)",
            fix="make config-upgrade",
        )
    return CheckResult("config.yaml version", "ok", f"v{user_ver}")
def check_models_configured(config_path: Path) -> CheckResult:
    """Require at least one entry under `models` in config.yaml."""
    if not config_path.exists():
        return CheckResult("models configured", "skip")
    try:
        models = _load_yaml_file(config_path).get("models", [])
        if not models:
            return CheckResult(
                "models configured",
                "fail",
                "no models found",
                fix="Run 'make setup' to configure an LLM provider",
            )
        return CheckResult("models configured", "ok", f"{len(models)} model(s)")
    except Exception as exc:
        return CheckResult("models configured", "fail", str(exc))
def check_config_loadable(config_path: Path) -> CheckResult:
    """Verify config.yaml parses into DeerFlow's AppConfig schema."""
    if not config_path.exists():
        return CheckResult("config.yaml loadable", "skip")
    try:
        _load_app_config(config_path)
    except Exception as exc:
        return CheckResult(
            "config.yaml loadable",
            "fail",
            str(exc),
            fix="Run 'make setup' again, or compare with config.example.yaml",
        )
    return CheckResult("config.yaml loadable", "ok")
def check_llm_api_key(config_path: Path) -> list[CheckResult]:
    """Check that each model's $ENV_VAR references are set in the environment.

    Loads .env (without overriding already-set variables), then walks every
    model entry for string values of the form "$NAME" and verifies that NAME
    is present in os.environ. Returns one CheckResult per reference found.
    """
    if not config_path.exists():
        return []
    results: list[CheckResult] = []

    def _collect_env_refs(obj: object) -> list[str]:
        # Recursively gather all "$ENV_VAR"-style string values in a model
        # mapping. NOTE(review): the "${VAR}" brace form is not handled —
        # confirm configs only use the bare "$VAR" form.
        refs: list[str] = []
        if isinstance(obj, str) and obj.startswith("$"):
            refs.append(obj[1:])
        elif isinstance(obj, dict):
            for value in obj.values():
                refs.extend(_collect_env_refs(value))
        elif isinstance(obj, list):
            for item in obj:
                refs.extend(_collect_env_refs(item))
        return refs

    try:
        import yaml
        from dotenv import load_dotenv

        env_path = config_path.parent / ".env"
        if env_path.exists():
            load_dotenv(env_path, override=False)
        with open(config_path, encoding="utf-8") as f:
            data = yaml.safe_load(f) or {}
        for model in data.get("models", []):
            env_refs = _collect_env_refs(model)
            model_name = model.get("name", "default")
            for var in env_refs:
                label = f"{var} set (model: {model_name})"
                if os.environ.get(var):
                    results.append(CheckResult(label, "ok"))
                else:
                    results.append(
                        CheckResult(
                            label,
                            "fail",
                            fix=f"Add {var}=<your-key> to your .env file",
                        )
                    )
    except Exception as exc:
        results.append(CheckResult("LLM API key check", "fail", str(exc)))
    return results
def check_llm_package(config_path: Path) -> list[CheckResult]:
    """Check that the LangChain provider package is installed.

    Derives the pip package name from each model's `use` path (e.g.
    "langchain_openai:ChatOpenAI" → "langchain-openai") and attempts an
    import; each unique package yields one CheckResult.
    """
    if not config_path.exists():
        return []
    results: list[CheckResult] = []
    try:
        import yaml
        with open(config_path, encoding="utf-8") as f:
            data = yaml.safe_load(f) or {}
        # Deduplicate so several models on the same provider report once.
        seen_packages: set[str] = set()
        for model in data.get("models", []):
            use = model.get("use", "")
            if ":" in use:
                package_path = use.split(":")[0]
                # e.g. langchain_openai → langchain-openai
                top_level = package_path.split(".")[0]
                pip_name = top_level.replace("_", "-")
                if pip_name in seen_packages:
                    continue
                seen_packages.add(pip_name)
                label = f"{pip_name} installed"
                try:
                    __import__(top_level)
                    results.append(CheckResult(label, "ok"))
                except ImportError:
                    results.append(
                        CheckResult(
                            label,
                            "fail",
                            fix=f"cd backend && uv add {pip_name}",
                        )
                    )
    except Exception as exc:
        results.append(CheckResult("LLM package check", "fail", str(exc)))
    return results
def check_llm_auth(config_path: Path) -> list[CheckResult]:
    """Verify local CLI auth for Codex/Claude provider models.

    For each configured model using the Codex or Claude chat providers, check
    that credentials exist (auth file on disk or an OAuth/API env var). Other
    providers are not checked here — their keys are covered by
    check_llm_api_key().
    """
    if not config_path.exists():
        return []
    results: list[CheckResult] = []
    try:
        data = _load_yaml_file(config_path)
        for model in data.get("models", []):
            use = model.get("use", "")
            model_name = model.get("name", "default")
            if use == "deerflow.models.openai_codex_provider:CodexChatModel":
                auth_path = Path(os.environ.get("CODEX_AUTH_PATH", "~/.codex/auth.json")).expanduser()
                if auth_path.exists():
                    results.append(CheckResult(f"Codex CLI auth available (model: {model_name})", "ok", str(auth_path)))
                else:
                    results.append(
                        CheckResult(
                            f"Codex CLI auth available (model: {model_name})",
                            "fail",
                            str(auth_path),
                            fix="Run `codex login`, or set CODEX_AUTH_PATH to a valid auth.json",
                        )
                    )
            if use == "deerflow.models.claude_provider:ClaudeChatModel":
                # Candidate credential files: an explicit env override first,
                # then Claude Code's default location. (The original built this
                # list via a comprehension that iterated env names but indexed
                # os.environ with a hard-coded key — same behavior, clearer now.)
                credential_paths: list[Path] = []
                override = os.environ.get("CLAUDE_CODE_CREDENTIALS_PATH")
                if override:
                    credential_paths.append(Path(override).expanduser())
                credential_paths.append(Path("~/.claude/.credentials.json").expanduser())
                has_oauth_env = any(
                    os.environ.get(name)
                    for name in (
                        "ANTHROPIC_API_KEY",
                        "CLAUDE_CODE_OAUTH_TOKEN",
                        "ANTHROPIC_AUTH_TOKEN",
                        "CLAUDE_CODE_OAUTH_TOKEN_FILE_DESCRIPTOR",
                    )
                )
                existing_path = next((path for path in credential_paths if path.exists()), None)
                if has_oauth_env or existing_path is not None:
                    detail = "env var set" if has_oauth_env else str(existing_path)
                    results.append(CheckResult(f"Claude auth available (model: {model_name})", "ok", detail))
                else:
                    results.append(
                        CheckResult(
                            f"Claude auth available (model: {model_name})",
                            "fail",
                            fix=(
                                "Set ANTHROPIC_API_KEY / CLAUDE_CODE_OAUTH_TOKEN, "
                                "or place credentials at ~/.claude/.credentials.json"
                            ),
                        )
                    )
    except Exception as exc:
        results.append(CheckResult("LLM auth check", "fail", str(exc)))
    return results
def check_web_search(config_path: Path) -> CheckResult:
    """Check the `web_search` tool configuration; thin wrapper over check_web_tool()."""
    return check_web_tool(config_path, tool_name="web_search", label="web search configured")
def check_web_tool(config_path: Path, *, tool_name: str, label: str) -> CheckResult:
    """Warn (not fail) if a web capability is not configured.

    Resolution order over the tool's configured `use` paths:
      1. known key-free providers → ok;
      2. known key-based providers → ok if the API key env var is set,
         warn otherwise;
      3. anything else → importable module:attr path → ok, else fail.
    """
    if not config_path.exists():
        return CheckResult(label, "skip")
    try:
        from dotenv import load_dotenv
        env_path = config_path.parent / ".env"
        if env_path.exists():
            load_dotenv(env_path, override=False)
        data = _load_yaml_file(config_path)
        tool_uses = [t.get("use", "") for t in data.get("tools", []) if t.get("name") == tool_name]
        if not tool_uses:
            return CheckResult(
                label,
                "warn",
                f"no {tool_name} tool in config",
                fix=f"Run 'make setup' to configure {tool_name}",
            )
        # Providers that work without an API key, per capability.
        free_providers = {
            "web_search": {"ddg_search": "DuckDuckGo (no key needed)"},
            "web_fetch": {"jina_ai": "Jina AI Reader (no key needed)"},
        }
        # Providers that need an API key, mapped to the env var they read.
        key_providers = {
            "web_search": {
                "tavily": "TAVILY_API_KEY",
                "infoquest": "INFOQUEST_API_KEY",
                "exa": "EXA_API_KEY",
                "firecrawl": "FIRECRAWL_API_KEY",
            },
            "web_fetch": {
                "infoquest": "INFOQUEST_API_KEY",
                "exa": "EXA_API_KEY",
                "firecrawl": "FIRECRAWL_API_KEY",
            },
        }
        # Pass 1: a key-free provider satisfies the capability immediately.
        for use in tool_uses:
            for provider, detail in free_providers.get(tool_name, {}).items():
                if provider in use:
                    return CheckResult(label, "ok", detail)
        # Pass 2: key-based provider — verify its env var is actually set.
        for use in tool_uses:
            for provider, var in key_providers.get(tool_name, {}).items():
                if provider in use:
                    val = os.environ.get(var)
                    if val:
                        return CheckResult(label, "ok", f"{provider} ({var} set)")
                    return CheckResult(
                        label,
                        "warn",
                        f"{provider} configured but {var} not set",
                        fix=f"Add {var}=<your-key> to .env, or run 'make setup'",
                    )
        # Pass 3: unknown/custom provider — at minimum its use path must import.
        for use in tool_uses:
            split = _split_use_path(use)
            if split is None:
                return CheckResult(
                    label,
                    "fail",
                    f"invalid use path: {use}",
                    fix="Use a valid module:path provider from config.example.yaml",
                )
            module_name, attr_name = split
            try:
                module = import_module(module_name)
                getattr(module, attr_name)
            except Exception as exc:
                return CheckResult(
                    label,
                    "fail",
                    f"provider import failed: {use} ({exc})",
                    fix="Install the provider dependency or pick a valid provider in `make setup`",
                )
        return CheckResult(label, "ok")
    except Exception as exc:
        return CheckResult(label, "warn", str(exc))
def check_web_fetch(config_path: Path) -> CheckResult:
    """Check the `web_fetch` tool configuration; thin wrapper over check_web_tool()."""
    return check_web_tool(config_path, tool_name="web_fetch", label="web fetch configured")
def check_frontend_env(project_root: Path) -> CheckResult:
    """Warn (non-fatal) when frontend/.env is missing."""
    if (project_root / "frontend" / ".env").exists():
        return CheckResult("frontend/.env found", "ok")
    return CheckResult(
        "frontend/.env found",
        "warn",
        fix="Run 'make setup' or copy frontend/.env.example to frontend/.env",
    )
def check_sandbox(config_path: Path) -> list[CheckResult]:
    """Validate the sandbox section: provider choice plus mode-specific warnings."""
    if not config_path.exists():
        return [CheckResult("sandbox configured", "skip")]
    try:
        data = _load_yaml_file(config_path)
        sandbox = data.get("sandbox")
        if not isinstance(sandbox, dict):
            return [
                CheckResult(
                    "sandbox configured",
                    "fail",
                    "missing sandbox section",
                    fix="Run 'make setup' to choose an execution mode",
                )
            ]
        sandbox_use = sandbox.get("use", "")
        tools = data.get("tools", [])
        tool_names = {tool.get("name") for tool in tools if isinstance(tool, dict)}
        results: list[CheckResult] = []
        if "LocalSandboxProvider" in sandbox_use:
            results.append(CheckResult("sandbox configured", "ok", "Local sandbox"))
            has_bash_tool = "bash" in tool_names
            allow_host_bash = bool(sandbox.get("allow_host_bash", False))
            # A configured bash tool with host bash disabled is a mismatch;
            # host bash enabled at all earns a security nudge.
            if has_bash_tool and not allow_host_bash:
                results.append(
                    CheckResult(
                        "bash compatibility",
                        "warn",
                        "bash tool configured but host bash is disabled",
                        fix="Enable host bash only in a fully trusted environment, or switch to container sandbox",
                    )
                )
            elif allow_host_bash:
                results.append(
                    CheckResult(
                        "bash compatibility",
                        "warn",
                        "host bash enabled on LocalSandboxProvider",
                        fix="Use container sandbox for stronger isolation when bash is required",
                    )
                )
        elif "AioSandboxProvider" in sandbox_use:
            results.append(CheckResult("sandbox configured", "ok", "Container sandbox"))
            # Without a remote provisioner, a local container runtime must exist.
            if not sandbox.get("provisioner_url") and not (shutil.which("docker") or shutil.which("container")):
                results.append(
                    CheckResult(
                        "container runtime available",
                        "warn",
                        "no Docker/Apple Container runtime detected",
                        fix="Install Docker Desktop / Apple Container, or switch to local sandbox",
                    )
                )
        elif sandbox_use:
            # Unrecognized but non-empty provider: accept it as a custom provider.
            results.append(CheckResult("sandbox configured", "ok", sandbox_use))
        else:
            results.append(
                CheckResult(
                    "sandbox configured",
                    "fail",
                    "sandbox.use is empty",
                    fix="Run 'make setup' to choose an execution mode",
                )
            )
        return results
    except Exception as exc:
        return [CheckResult("sandbox configured", "fail", str(exc))]
def check_env_file(project_root: Path) -> CheckResult:
    """Warn (non-fatal) when the repo-root .env file is absent."""
    if (project_root / ".env").exists():
        return CheckResult(".env found", "ok")
    return CheckResult(
        ".env found",
        "warn",
        fix="Run 'make setup' or copy .env.example to .env",
    )
# ---------------------------------------------------------------------------
# Main
# ---------------------------------------------------------------------------
def main() -> int:
    """Run every health check, print the grouped report, and return the exit code.

    Returns 0 when no required check failed (warnings allowed), 1 otherwise.
    """
    project_root = Path(__file__).resolve().parents[1]
    config_path = project_root / "config.yaml"
    # Load .env early so key checks work
    try:
        from dotenv import load_dotenv
        env_path = project_root / ".env"
        if env_path.exists():
            load_dotenv(env_path, override=False)
    except ImportError:
        # dotenv is optional here; checks fall back to the process environment.
        pass
    print()
    print(bold("DeerFlow Health Check"))
    print("" * 40)
    sections: list[tuple[str, list[CheckResult]]] = []
    # ── System Requirements ────────────────────────────────────────────────────
    sys_checks = [
        check_python(),
        check_node(),
        check_pnpm(),
        check_uv(),
        check_nginx(),
    ]
    sections.append(("System Requirements", sys_checks))
    # ── Configuration ─────────────────────────────────────────────────────────
    cfg_checks: list[CheckResult] = [
        check_env_file(project_root),
        check_frontend_env(project_root),
        check_config_exists(config_path),
        check_config_version(config_path, project_root),
        check_config_loadable(config_path),
        check_models_configured(config_path),
    ]
    sections.append(("Configuration", cfg_checks))
    # ── LLM Provider ──────────────────────────────────────────────────────────
    llm_checks: list[CheckResult] = [
        *check_llm_api_key(config_path),
        *check_llm_auth(config_path),
        *check_llm_package(config_path),
    ]
    sections.append(("LLM Provider", llm_checks))
    # ── Web Capabilities ─────────────────────────────────────────────────────
    search_checks = [check_web_search(config_path), check_web_fetch(config_path)]
    sections.append(("Web Capabilities", search_checks))
    # ── Sandbox ──────────────────────────────────────────────────────────────
    sandbox_checks = check_sandbox(config_path)
    sections.append(("Sandbox", sandbox_checks))
    # ── Render ────────────────────────────────────────────────────────────────
    total_fails = 0
    total_warns = 0
    for section_title, checks in sections:
        print()
        print(bold(section_title))
        for cr in checks:
            cr.print()
            if cr.status == "fail":
                total_fails += 1
            elif cr.status == "warn":
                total_warns += 1
    # ── Summary ───────────────────────────────────────────────────────────────
    print()
    print("" * 40)
    if total_fails == 0 and total_warns == 0:
        print(f"Status: {green('Ready')}")
        print(f"Run {cyan('make dev')} to start DeerFlow")
    elif total_fails == 0:
        print(f"Status: {yellow(f'Ready ({total_warns} warning(s))')}")
        print(f"Run {cyan('make dev')} to start DeerFlow")
    else:
        print(f"Status: {red(f'{total_fails} error(s), {total_warns} warning(s)')}")
        print("Fix the errors above, then run 'make doctor' again.")
    print()
    return 0 if total_fails == 0 else 1
# Script entry point: propagate main()'s status as the process exit code.
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -0,0 +1,166 @@
#!/usr/bin/env python3
"""Export Claude Code OAuth credentials from macOS Keychain on purpose.
This helper is intentionally manual. DeerFlow runtime does not probe Keychain.
Use this script when you want to bridge an existing Claude Code login into an
environment variable or an exported credentials file for DeerFlow.
"""
from __future__ import annotations
import argparse
import json
import os
import platform
import shlex
import subprocess
import sys
import tempfile
from hashlib import sha256
from pathlib import Path
from typing import Any
def claude_code_oauth_file_suffix() -> str:
    """Mirror Claude Code's credential-name suffix selection from env vars.

    Precedence: custom OAuth URL, then local bridge, then staging, else no suffix.
    """
    env = os.getenv
    if env("CLAUDE_CODE_CUSTOM_OAUTH_URL"):
        return "-custom-oauth"
    if env("USE_LOCAL_OAUTH") or env("LOCAL_BRIDGE"):
        return "-local-oauth"
    return "-staging-oauth" if env("USE_STAGING_OAUTH") else ""
def default_service_name() -> str:
    """Compute Claude Code's Keychain service name.

    Base name carries the OAuth suffix; a custom CLAUDE_CONFIG_DIR appends a
    short hash of the expanded directory path.
    """
    base = f"Claude Code{claude_code_oauth_file_suffix()}-credentials"
    config_dir = os.getenv("CLAUDE_CONFIG_DIR")
    if not config_dir:
        return base
    digest = sha256(str(Path(config_dir).expanduser()).encode()).hexdigest()[:8]
    return f"{base}-{digest}"
def default_account_name() -> str:
    """Default Keychain account: the login user, with a stable fallback."""
    user = os.getenv("USER")
    return user if user else "claude-code-user"
def load_keychain_container(service: str, account: str) -> dict[str, Any]:
    """Read and validate the Claude Code credential JSON from macOS Keychain.

    Invokes `security find-generic-password` and returns the parsed JSON
    container. Raises RuntimeError with a specific message for every failure
    mode: non-macOS host, tool invocation failure, lookup failure, empty item,
    malformed JSON, or a container without claudeAiOauth.accessToken.
    """
    if platform.system() != "Darwin":
        raise RuntimeError("Claude Code Keychain export is only supported on macOS.")
    try:
        # -w prints only the secret; check=False so we can surface stderr ourselves.
        result = subprocess.run(
            ["security", "find-generic-password", "-a", account, "-w", "-s", service],
            capture_output=True,
            text=True,
            check=False,
        )
    except OSError as exc:
        raise RuntimeError(f"Failed to invoke macOS security tool: {exc}") from exc
    if result.returncode != 0:
        stderr = (result.stderr or "").strip() or "unknown Keychain error"
        raise RuntimeError(f"Keychain lookup failed for service={service!r} account={account!r}: {stderr}")
    secret = (result.stdout or "").strip()
    if not secret:
        raise RuntimeError("Keychain item was empty.")
    try:
        data = json.loads(secret)
    except json.JSONDecodeError as exc:
        raise RuntimeError("Claude Code Keychain item did not contain valid JSON.") from exc
    # The container must at least hold the OAuth access token to be usable.
    access_token = data.get("claudeAiOauth", {}).get("accessToken", "")
    if not access_token:
        raise RuntimeError("Claude Code Keychain item did not contain claudeAiOauth.accessToken.")
    return data
def write_credentials_file(output_path: Path, data: dict[str, Any]) -> None:
    """Atomically write *data* as indented JSON to *output_path*.

    Writes to a mkstemp temp file in the destination directory (created with
    owner-only permissions) and renames it into place; the temp file is
    removed on any failure.
    """
    output_path.parent.mkdir(parents=True, exist_ok=True)
    fd, tmp_name = tempfile.mkstemp(prefix=f"{output_path.name}.", suffix=".tmp", dir=output_path.parent)
    tmp_path = Path(tmp_name)
    try:
        with os.fdopen(fd, "w", encoding="utf-8") as fh:
            fh.write(json.dumps(data, indent=2) + "\n")
        tmp_path.replace(output_path)
    except Exception:
        tmp_path.unlink(missing_ok=True)
        raise
def parse_args() -> argparse.Namespace:
    """Build and parse the CLI for this export helper.

    Every action is opt-in: without --show-target, --print-token,
    --print-export, or --write-credentials, main() treats the invocation
    as an error.
    """
    parser = argparse.ArgumentParser(
        description="Manually export Claude Code OAuth credentials from macOS Keychain for DeerFlow.",
    )
    parser.add_argument(
        "--service",
        default=default_service_name(),
        help="Override the Keychain service name. Defaults to Claude Code's computed service name.",
    )
    parser.add_argument(
        "--account",
        default=default_account_name(),
        help="Override the Keychain account name. Defaults to the current user.",
    )
    parser.add_argument(
        "--show-target",
        action="store_true",
        help="Print the resolved Keychain service/account without reading Keychain.",
    )
    parser.add_argument(
        "--print-token",
        action="store_true",
        help="Print only the OAuth access token to stdout.",
    )
    parser.add_argument(
        "--print-export",
        action="store_true",
        help="Print a shell export command for CLAUDE_CODE_OAUTH_TOKEN.",
    )
    parser.add_argument(
        "--write-credentials",
        type=Path,
        help="Write the full Claude credentials container to this file with 0600 permissions.",
    )
    return parser.parse_args()
def main() -> int:
    """CLI entry point.

    Returns 0 on success, 1 on a Keychain/export failure, and 2 when invoked
    with no export action at all (--show-target alone is still valid).
    """
    args = parse_args()
    if args.show_target:
        print(f"service={args.service}")
        print(f"account={args.account}")
    if not any([args.print_token, args.print_export, args.write_credentials]):
        if not args.show_target:
            print("No export action selected. Use --show-target, --print-export, --print-token, or --write-credentials.", file=sys.stderr)
            return 2
        return 0
    try:
        data = load_keychain_container(service=args.service, account=args.account)
    except RuntimeError as exc:
        print(str(exc), file=sys.stderr)
        return 1
    access_token = data["claudeAiOauth"]["accessToken"]
    if args.print_token:
        print(access_token)
    if args.print_export:
        print(f"export CLAUDE_CODE_OAUTH_TOKEN={shlex.quote(access_token)}")
    if args.write_credentials:
        output_path = args.write_credentials.expanduser()
        write_credentials_file(output_path, data)
        # Status goes to stderr so stdout stays clean for --print-* consumers.
        print(f"Wrote Claude Code credentials to {output_path}", file=sys.stderr)
    return 0
# Script entry point; SystemExit carries main()'s return code.
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,81 @@
#!/usr/bin/env python3
"""Load the Memory Settings review sample into a local DeerFlow runtime."""
from __future__ import annotations
import argparse
import json
import shutil
from datetime import datetime
from pathlib import Path
def default_source(repo_root: Path) -> Path:
    """Sample memory JSON shipped with the backend docs."""
    return repo_root.joinpath("backend", "docs", "memory-settings-sample.json")


def default_target(repo_root: Path) -> Path:
    """Runtime memory store read by the local backend."""
    return repo_root.joinpath("backend", ".deer-flow", "memory.json")
def parse_args(repo_root: Path) -> argparse.Namespace:
    """Parse CLI options; defaults derive source/target paths from *repo_root*."""
    parser = argparse.ArgumentParser(
        description="Copy the Memory Settings sample data into the local runtime memory file.",
    )
    parser.add_argument(
        "--source",
        type=Path,
        default=default_source(repo_root),
        help="Path to the sample JSON file.",
    )
    parser.add_argument(
        "--target",
        type=Path,
        default=default_target(repo_root),
        help="Path to the runtime memory.json file.",
    )
    parser.add_argument(
        "--no-backup",
        action="store_true",
        help="Overwrite the target without writing a backup copy first.",
    )
    return parser.parse_args()
def validate_json_file(path: Path) -> None:
    """Raise (JSONDecodeError/OSError) unless *path* contains valid JSON."""
    json.loads(path.read_text(encoding="utf-8"))
def main() -> int:
    """Validate the sample, back up any existing memory file, then install it."""
    repo_root = Path(__file__).resolve().parents[1]
    args = parse_args(repo_root)
    source = args.source.resolve()
    target = args.target.resolve()
    if not source.exists():
        raise SystemExit(f"Sample file not found: {source}")
    # Fail before touching the target if the sample is not valid JSON.
    validate_json_file(source)
    target.parent.mkdir(parents=True, exist_ok=True)
    backup_path: Path | None = None
    if target.exists() and not args.no_backup:
        # Timestamped sibling backup, e.g. memory.json.bak-20260412-142357
        timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
        backup_path = target.with_name(f"{target.name}.bak-{timestamp}")
        shutil.copy2(target, backup_path)
    shutil.copy2(source, target)
    print(f"Loaded sample memory into: {target}")
    if backup_path is not None:
        print(f"Backup created at: {backup_path}")
    else:
        print("No backup created.")
    return 0
# Script entry point; SystemExit carries main()'s return code.
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,20 @@
@echo off
REM Locate Git for Windows' bundled bash.exe and forward all arguments to it.
REM bash.exe is expected at "..\bin\bash.exe" relative to the directory that
REM contains git.exe on PATH (%%~dpI expands to git.exe's directory).
setlocal
set "bash_exe="
for /f "delims=" %%I in ('where git 2^>NUL') do (
    if exist "%%~dpI..\bin\bash.exe" (
        set "bash_exe=%%~dpI..\bin\bash.exe"
        goto :found_bash
    )
)
echo Could not locate Git for Windows Bash ("..\bin\bash.exe" relative to git on PATH). Ensure Git for Windows is installed and that git and bash.exe are available on PATH.
exit /b 1
:found_bash
echo Detected Windows - using Git Bash...
REM Forward every argument and propagate bash's exit code to our caller.
"%bash_exe%" %*
set "cmd_rc=%ERRORLEVEL%"
exit /b %cmd_rc%

331
deer-flow/scripts/serve.sh Executable file
View File

@@ -0,0 +1,331 @@
#!/usr/bin/env bash
#
# serve.sh — Unified DeerFlow service launcher
#
# Usage:
# ./scripts/serve.sh [--dev|--prod] [--gateway] [--daemon] [--stop|--restart]
#
# Modes:
# --dev Development mode with hot-reload (default)
# --prod Production mode, pre-built frontend, no hot-reload
# --gateway Gateway mode (experimental): skip LangGraph server,
# agent runtime embedded in Gateway API
# --daemon Run all services in background (nohup), exit after startup
#
# Actions:
# --skip-install Skip dependency installation (faster restart)
# --stop Stop all running services and exit
# --restart Stop all services, then start with the given mode flags
#
# Examples:
# ./scripts/serve.sh --dev # Standard dev (4 processes)
# ./scripts/serve.sh --dev --gateway # Gateway dev (3 processes)
# ./scripts/serve.sh --prod --gateway # Gateway prod (3 processes)
# ./scripts/serve.sh --dev --daemon # Standard dev, background
# ./scripts/serve.sh --dev --gateway --daemon # Gateway dev, background
# ./scripts/serve.sh --stop # Stop all services
# ./scripts/serve.sh --restart --dev --gateway # Restart in gateway mode
#
# Must be run from the repo root directory.
set -e
# Resolve the repo root from this script's own location so the launcher works
# regardless of the caller's working directory.
REPO_ROOT="$(builtin cd "$(dirname "${BASH_SOURCE[0]}")/.." >/dev/null 2>&1 && pwd -P)"
cd "$REPO_ROOT"
# ── Load .env ────────────────────────────────────────────────────────────────
# set -a exports every variable assigned while sourcing, so .env values are
# visible to all child processes (uv, pnpm, nginx, ...).
if [ -f "$REPO_ROOT/.env" ]; then
    set -a
    source "$REPO_ROOT/.env"
    set +a
fi
# ── Argument parsing ─────────────────────────────────────────────────────────
DEV_MODE=true
GATEWAY_MODE=false
DAEMON_MODE=false
SKIP_INSTALL=false
ACTION="start" # start | stop | restart
for arg in "$@"; do
    case "$arg" in
        --dev) DEV_MODE=true ;;
        --prod) DEV_MODE=false ;;
        --gateway) GATEWAY_MODE=true ;;
        --daemon) DAEMON_MODE=true ;;
        --skip-install) SKIP_INSTALL=true ;;
        --stop) ACTION="stop" ;;
        --restart) ACTION="restart" ;;
        *)
            echo "Unknown argument: $arg"
            echo "Usage: $0 [--dev|--prod] [--gateway] [--daemon] [--skip-install] [--stop|--restart]"
            exit 1
            ;;
    esac
done
# ── Stop helper ──────────────────────────────────────────────────────────────
_kill_port() {
    # Force-kill whatever process is still listening on the given TCP port.
    local port="$1"
    local owner_pid
    owner_pid=$(lsof -ti :"$port" 2>/dev/null) || true
    # $owner_pid is deliberately unquoted: lsof may report several PIDs.
    [ -n "$owner_pid" ] && kill -9 $owner_pid 2>/dev/null || true
}
stop_all() {
    # Best-effort teardown of every DeerFlow process: match by command line
    # first, then force-free the well-known ports for anything that survived.
    echo "Stopping all services..."
    pkill -f "langgraph dev" 2>/dev/null || true
    pkill -f "uvicorn app.gateway.app:app" 2>/dev/null || true
    pkill -f "next dev" 2>/dev/null || true
    pkill -f "next start" 2>/dev/null || true
    pkill -f "next-server" 2>/dev/null || true
    # Ask nginx to quit gracefully using the same config/prefix it was started
    # with; fall back to SIGKILL a second later if it lingers.
    nginx -c "$REPO_ROOT/docker/nginx/nginx.local.conf" -p "$REPO_ROOT" -s quit 2>/dev/null || true
    sleep 1
    pkill -9 nginx 2>/dev/null || true
    # Force-kill any survivors still holding the service ports
    _kill_port 2024
    _kill_port 8001
    _kill_port 3000
    ./scripts/cleanup-containers.sh deer-flow-sandbox 2>/dev/null || true
    echo "✓ All services stopped"
}
# ── Action routing ───────────────────────────────────────────────────────────
if [ "$ACTION" = "stop" ]; then
    stop_all
    exit 0
fi
ALREADY_STOPPED=false
if [ "$ACTION" = "restart" ]; then
    stop_all
    sleep 1
    ALREADY_STOPPED=true
fi
# ── Derive runtime flags ────────────────────────────────────────────────────
# Gateway mode embeds the agent runtime in the Gateway API process, so the
# standalone LangGraph server must not be started.
if $GATEWAY_MODE; then
    export SKIP_LANGGRAPH_SERVER=1
fi
# Mode label for banner
if $DEV_MODE && $GATEWAY_MODE; then
    MODE_LABEL="DEV + GATEWAY (experimental)"
elif $DEV_MODE; then
    MODE_LABEL="DEV (hot-reload enabled)"
elif $GATEWAY_MODE; then
    MODE_LABEL="PROD + GATEWAY (experimental)"
else
    MODE_LABEL="PROD (optimized)"
fi
if $DAEMON_MODE; then
    MODE_LABEL="$MODE_LABEL [daemon]"
fi
# Frontend command
if $DEV_MODE; then
    FRONTEND_CMD="pnpm run dev"
else
    # Production preview requires BETTER_AUTH_SECRET; generate a throwaway
    # value with whichever Python interpreter is available.
    if command -v python3 >/dev/null 2>&1; then
        PYTHON_BIN="python3"
    elif command -v python >/dev/null 2>&1; then
        PYTHON_BIN="python"
    else
        echo "Python is required to generate BETTER_AUTH_SECRET."
        exit 1
    fi
    FRONTEND_CMD="env BETTER_AUTH_SECRET=$($PYTHON_BIN -c 'import secrets; print(secrets.token_hex(16))') pnpm run preview"
fi
# Extra flags for uvicorn/langgraph
# LangGraph's own reloader stays off; the Gateway hot-reloads only in
# foreground dev mode (reload + nohup daemon mode do not mix well).
LANGGRAPH_EXTRA_FLAGS="--no-reload"
if $DEV_MODE && ! $DAEMON_MODE; then
    GATEWAY_EXTRA_FLAGS="--reload --reload-include='*.yaml' --reload-include='.env' --reload-exclude='*.pyc' --reload-exclude='__pycache__' --reload-exclude='sandbox/' --reload-exclude='.deer-flow/'"
else
    GATEWAY_EXTRA_FLAGS=""
fi
# ── Stop existing services (skip if restart already did it) ──────────────────
if ! $ALREADY_STOPPED; then
    stop_all
    sleep 1
fi
# ── Config check ─────────────────────────────────────────────────────────────
# Accept an explicit DEER_FLOW_CONFIG_PATH, or a config.yaml in backend/ or
# at the repo root.
if ! { \
    [ -n "$DEER_FLOW_CONFIG_PATH" ] && [ -f "$DEER_FLOW_CONFIG_PATH" ] || \
    [ -f backend/config.yaml ] || \
    [ -f config.yaml ]; \
}; then
    echo "✗ No DeerFlow config file found."
    echo " Run 'make setup' (recommended) or 'make config' to generate config.yaml."
    exit 1
fi
"$REPO_ROOT/scripts/config-upgrade.sh"
# ── Install dependencies ────────────────────────────────────────────────────
if ! $SKIP_INSTALL; then
    echo "Syncing dependencies..."
    (cd backend && uv sync --quiet) || { echo "✗ Backend dependency install failed"; exit 1; }
    (cd frontend && pnpm install --silent) || { echo "✗ Frontend dependency install failed"; exit 1; }
    echo "✓ Dependencies synced"
else
    echo "⏩ Skipping dependency install (--skip-install)"
fi
# ── Sync frontend .env.local ─────────────────────────────────────────────────
# Next.js .env.local takes precedence over process env vars.
# The script manages the NEXT_PUBLIC_LANGGRAPH_BASE_URL line to ensure
# the frontend routes match the active backend mode.
FRONTEND_ENV_LOCAL="$REPO_ROOT/frontend/.env.local"
ENV_KEY="NEXT_PUBLIC_LANGGRAPH_BASE_URL"
sync_frontend_env() {
    # Keep NEXT_PUBLIC_LANGGRAPH_BASE_URL in frontend/.env.local aligned with
    # the active backend mode: point it at the Gateway compat API in gateway
    # mode, remove the override otherwise so the standard route applies.
    local has_key=false
    if [ -f "$FRONTEND_ENV_LOCAL" ] && grep -q "^${ENV_KEY}=" "$FRONTEND_ENV_LOCAL"; then
        has_key=true
    fi
    if $GATEWAY_MODE; then
        if $has_key; then
            sed -i.bak "s|^${ENV_KEY}=.*|${ENV_KEY}=/api/langgraph-compat|" "$FRONTEND_ENV_LOCAL" && rm -f "${FRONTEND_ENV_LOCAL}.bak"
        else
            echo "${ENV_KEY}=/api/langgraph-compat" >> "$FRONTEND_ENV_LOCAL"
        fi
    elif $has_key; then
        # Remove override — frontend falls back to /api/langgraph (standard)
        sed -i.bak "/^${ENV_KEY}=/d" "$FRONTEND_ENV_LOCAL" && rm -f "${FRONTEND_ENV_LOCAL}.bak"
    fi
}
sync_frontend_env
# ── Banner ───────────────────────────────────────────────────────────────────
echo ""
echo "=========================================="
echo " Starting DeerFlow"
echo "=========================================="
echo ""
echo " Mode: $MODE_LABEL"
echo ""
echo " Services:"
if ! $GATEWAY_MODE; then
    echo " LangGraph → localhost:2024 (agent runtime)"
fi
echo " Gateway → localhost:8001 (REST API$(if $GATEWAY_MODE; then echo " + agent runtime"; fi))"
echo " Frontend → localhost:3000 (Next.js)"
echo " Nginx → localhost:2026 (reverse proxy)"
echo ""
# ── Cleanup handler ──────────────────────────────────────────────────────────
cleanup() {
    # On Ctrl+C / TERM: disarm the trap first so stop_all's kills don't
    # re-enter this handler, then tear everything down.
    trap - INT TERM
    echo ""
    stop_all
    exit 0
}
trap cleanup INT TERM
# ── Helper: start a service ──────────────────────────────────────────────────
# run_service NAME COMMAND PORT TIMEOUT
# In daemon mode, wraps with nohup. Waits for port to be ready.
run_service() {
    # run_service NAME COMMAND PORT TIMEOUT
    # Launch COMMAND in the background (nohup'd in daemon mode so it outlives
    # this shell) and block until PORT accepts connections or TIMEOUT expires.
    local name="$1" cmd="$2" port="$3" timeout="$4"
    echo "Starting $name..."
    if $DAEMON_MODE; then
        nohup sh -c "$cmd" > /dev/null 2>&1 &
    else
        sh -c "$cmd" &
    fi
    # On startup failure, show the tail of the service's log, then tear
    # everything down via cleanup (which exits).
    ./scripts/wait-for-port.sh "$port" "$timeout" "$name" || {
        local logfile="logs/$(echo "$name" | tr '[:upper:]' '[:lower:]' | tr ' ' '-').log"
        echo "$name failed to start."
        [ -f "$logfile" ] && tail -20 "$logfile"
        cleanup
    }
    echo "$name started on localhost:$port"
}
# ── Start services ───────────────────────────────────────────────────────────
mkdir -p logs
# nginx runs with -p "$REPO_ROOT", so its temp dirs must exist beforehand.
mkdir -p temp/client_body_temp temp/proxy_temp temp/fastcgi_temp temp/uwsgi_temp temp/scgi_temp
# 1. LangGraph (skip in gateway mode)
if ! $GATEWAY_MODE; then
    # Log level precedence: env var > config.yaml log_level > "info".
    CONFIG_LOG_LEVEL=$(grep -m1 '^log_level:' config.yaml 2>/dev/null | awk '{print $2}' | tr -d ' ')
    LANGGRAPH_LOG_LEVEL="${LANGGRAPH_LOG_LEVEL:-${CONFIG_LOG_LEVEL:-info}}"
    LANGGRAPH_JOBS_PER_WORKER="${LANGGRAPH_JOBS_PER_WORKER:-10}"
    LANGGRAPH_ALLOW_BLOCKING="${LANGGRAPH_ALLOW_BLOCKING:-0}"
    LANGGRAPH_ALLOW_BLOCKING_FLAG=""
    if [ "$LANGGRAPH_ALLOW_BLOCKING" = "1" ]; then
        LANGGRAPH_ALLOW_BLOCKING_FLAG="--allow-blocking"
    fi
    run_service "LangGraph" \
        "cd backend && NO_COLOR=1 uv run langgraph dev --no-browser $LANGGRAPH_ALLOW_BLOCKING_FLAG --n-jobs-per-worker $LANGGRAPH_JOBS_PER_WORKER --server-log-level $LANGGRAPH_LOG_LEVEL $LANGGRAPH_EXTRA_FLAGS > ../logs/langgraph.log 2>&1" \
        2024 60
else
    echo "⏩ Skipping LangGraph (Gateway mode — runtime embedded in Gateway)"
fi
# 2. Gateway API
run_service "Gateway" \
    "cd backend && PYTHONPATH=. uv run uvicorn app.gateway.app:app --host 0.0.0.0 --port 8001 $GATEWAY_EXTRA_FLAGS > ../logs/gateway.log 2>&1" \
    8001 30
# 3. Frontend
run_service "Frontend" \
    "cd frontend && $FRONTEND_CMD > ../logs/frontend.log 2>&1" \
    3000 120
# 4. Nginx
run_service "Nginx" \
    "nginx -g 'daemon off;' -c '$REPO_ROOT/docker/nginx/nginx.local.conf' -p '$REPO_ROOT' > logs/nginx.log 2>&1" \
    2026 10
# ── Ready ────────────────────────────────────────────────────────────────────
echo ""
echo "=========================================="
echo " ✓ DeerFlow is running! [$MODE_LABEL]"
echo "=========================================="
echo ""
echo " 🌐 http://localhost:2026"
echo ""
if $GATEWAY_MODE; then
    echo " Routing: Frontend → Nginx → Gateway (embedded runtime)"
    echo " API: /api/langgraph-compat/* → Gateway agent runtime"
else
    echo " Routing: Frontend → Nginx → LangGraph + Gateway"
    echo " API: /api/langgraph/* → LangGraph server (2024)"
fi
echo " /api/* → Gateway REST API (8001)"
echo ""
echo " 📋 Logs: logs/{langgraph,gateway,frontend,nginx}.log"
echo ""
if $DAEMON_MODE; then
    echo " 🛑 Stop: make stop"
    # Detach — trap is no longer needed
    trap - INT TERM
else
    echo " Press Ctrl+C to stop all services"
    # Foreground mode: block on the background services; cleanup() runs on Ctrl+C.
    wait
fi

View File

@@ -0,0 +1,165 @@
#!/usr/bin/env python3
"""DeerFlow Interactive Setup Wizard.
Usage:
uv run python scripts/setup_wizard.py
"""
from __future__ import annotations
import sys
from pathlib import Path
# Make the scripts/ directory importable so wizard.* works
sys.path.insert(0, str(Path(__file__).resolve().parent))
def _is_interactive() -> bool:
return sys.stdin.isatty() and sys.stdout.isatty()
def main() -> int:
    """Run the interactive setup wizard.

    Returns a process exit code: 0 on success (or when the user keeps an
    existing config), 1 in non-interactive environments, 130 on Ctrl+C.
    """
    try:
        if not _is_interactive():
            print(
                "Non-interactive environment detected.\n"
                "Please edit config.yaml and .env directly, or run 'make setup' in a terminal."
            )
            return 1
        # Wizard modules are imported lazily, after the interactivity check.
        from wizard.ui import (
            ask_yes_no,
            bold,
            cyan,
            green,
            print_header,
            print_info,
            print_success,
            yellow,
        )
        from wizard.writer import write_config_yaml, write_env_file
        project_root = Path(__file__).resolve().parents[1]
        config_path = project_root / "config.yaml"
        env_path = project_root / ".env"
        print()
        print(bold("Welcome to DeerFlow Setup!"))
        print("This wizard will help you configure DeerFlow in a few minutes.")
        print()
        # Never clobber an existing config without asking first.
        if config_path.exists():
            print(yellow("Existing configuration detected."))
            print()
            should_reconfigure = ask_yes_no("Do you want to reconfigure?", default=False)
            if not should_reconfigure:
                print()
                print_info("Keeping existing config. Run 'make doctor' to verify your setup.")
                return 0
            print()
        total_steps = 4
        # Steps 1-3: collect choices (LLM, web search/fetch, execution mode).
        from wizard.steps.llm import run_llm_step
        llm = run_llm_step(f"Step 1/{total_steps}")
        from wizard.steps.search import run_search_step
        search = run_search_step(f"Step 2/{total_steps}")
        search_provider = search.search_provider
        search_api_key = search.search_api_key
        fetch_provider = search.fetch_provider
        fetch_api_key = search.fetch_api_key
        from wizard.steps.execution import run_execution_step
        execution = run_execution_step(f"Step 3/{total_steps}")
        # Step 4: persist the collected configuration.
        print_header(f"Step {total_steps}/{total_steps} · Writing configuration")
        write_config_yaml(
            config_path,
            provider_use=llm.provider.use,
            model_name=llm.model_name,
            display_name=f"{llm.provider.display_name} / {llm.model_name}",
            api_key_field=llm.provider.api_key_field,
            env_var=llm.provider.env_var,
            extra_model_config=llm.provider.extra_config or None,
            base_url=llm.base_url,
            search_use=search_provider.use if search_provider else None,
            search_tool_name=search_provider.tool_name if search_provider else "web_search",
            search_extra_config=search_provider.extra_config if search_provider else None,
            web_fetch_use=fetch_provider.use if fetch_provider else None,
            web_fetch_tool_name=fetch_provider.tool_name if fetch_provider else "web_fetch",
            web_fetch_extra_config=fetch_provider.extra_config if fetch_provider else None,
            sandbox_use=execution.sandbox_use,
            allow_host_bash=execution.allow_host_bash,
            include_bash_tool=execution.include_bash_tool,
            include_write_tools=execution.include_write_tools,
        )
        print_success(f"Config written to: {config_path.relative_to(project_root)}")
        # Seed .env from the example (if present) before appending keys.
        if not env_path.exists():
            env_example = project_root / ".env.example"
            if env_example.exists():
                import shutil
                shutil.copyfile(env_example, env_path)
        # Only providers that actually take an API key contribute entries.
        env_pairs: dict[str, str] = {}
        if llm.api_key:
            env_pairs[llm.provider.env_var] = llm.api_key
        if search_api_key and search_provider and search_provider.env_var:
            env_pairs[search_provider.env_var] = search_api_key
        if fetch_api_key and fetch_provider and fetch_provider.env_var:
            env_pairs[fetch_provider.env_var] = fetch_api_key
        if env_pairs:
            write_env_file(env_path, env_pairs)
            print_success(f"API keys written to: {env_path.relative_to(project_root)}")
        # The frontend keeps its own .env, copied once from its example.
        frontend_env = project_root / "frontend" / ".env"
        frontend_env_example = project_root / "frontend" / ".env.example"
        if not frontend_env.exists() and frontend_env_example.exists():
            import shutil
            shutil.copyfile(frontend_env_example, frontend_env)
            print_success("frontend/.env created from example")
        # Summary of everything that was configured.
        print_header("Setup complete!")
        print(f" {green('')} LLM: {llm.provider.display_name} / {llm.model_name}")
        if search_provider:
            print(f" {green('')} Web search: {search_provider.display_name}")
        else:
            print(f" {'':>3} Web search: not configured")
        if fetch_provider:
            print(f" {green('')} Web fetch: {fetch_provider.display_name}")
        else:
            print(f" {'':>3} Web fetch: not configured")
        sandbox_label = "Local sandbox" if execution.sandbox_use.endswith("LocalSandboxProvider") else "Container sandbox"
        print(f" {green('')} Execution: {sandbox_label}")
        if execution.include_bash_tool:
            bash_label = "enabled"
            if execution.allow_host_bash:
                bash_label += " (host bash)"
            print(f" {green('')} Bash: {bash_label}")
        else:
            print(f" {'':>3} Bash: disabled")
        if execution.include_write_tools:
            print(f" {green('')} File write: enabled")
        else:
            print(f" {'':>3} File write: disabled")
        print()
        print("Next steps:")
        print(f" {cyan('make install')} # Install dependencies (first time only)")
        print(f" {cyan('make dev')} # Start DeerFlow")
        print()
        print(f"Run {cyan('make doctor')} to verify your setup at any time.")
        print()
        return 0
    except KeyboardInterrupt:
        print("\n\nSetup cancelled.")
        return 130
if __name__ == "__main__":
    # Exit with main()'s status code (130 on Ctrl+C).
    sys.exit(main())

View File

@@ -0,0 +1,9 @@
#!/usr/bin/env bash
#
# start-daemon.sh — Start DeerFlow in daemon (background) mode
#
# Thin wrapper around serve.sh --daemon.
# Kept for backward compatibility.
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
# exec replaces this shell, so signals and the exit code come straight from serve.sh.
exec "$REPO_ROOT/scripts/serve.sh" --dev --daemon "$@"

View File

@@ -0,0 +1,218 @@
#!/usr/bin/env bash
set -euo pipefail
# Detect whether the current branch has working tool-failure downgrade:
# - Lead agent middleware chain includes error-handling
# - Subagent middleware chain includes error-handling
# - Failing tool call does not abort the whole call sequence
# - Subsequent successful tool call result is still preserved
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
BACKEND_DIR="${ROOT_DIR}/backend"
if ! command -v uv >/dev/null 2>&1; then
    echo "[FAIL] uv is required but not found in PATH."
    exit 1
fi
# Allow CI to point the uv cache somewhere persistent; default to /tmp.
export UV_CACHE_DIR="${UV_CACHE_DIR:-/tmp/uv-cache}"
echo "[INFO] Root: ${ROOT_DIR}"
echo "[INFO] Backend: ${BACKEND_DIR}"
echo "[INFO] UV cache: ${UV_CACHE_DIR}"
echo "[INFO] Running tool-failure downgrade detector..."
cd "${BACKEND_DIR}"
# The probe below is inline Python run inside the backend's uv environment.
# -u keeps output unbuffered so the [STEP]/[INFO] markers stream in order;
# the quoted heredoc delimiter ('PY') disables shell expansion in the code.
uv run python -u - <<'PY'
import asyncio
import logging
import ssl
from types import SimpleNamespace
from requests.exceptions import SSLError
from langchain.agents.middleware import AgentMiddleware
from langchain_core.messages import ToolMessage
from deerflow.agents.lead_agent.agent import _build_middlewares
from deerflow.config import get_app_config
from deerflow.sandbox.middleware import SandboxMiddleware
from deerflow.agents.middlewares.thread_data_middleware import ThreadDataMiddleware
# Simulated upstream TLS failure payload, shaped like what a flaky search
# backend surfaces through requests.
HANDSHAKE_ERROR = "[SSL: UNEXPECTED_EOF_WHILE_READING] EOF occurred in violation of protocol (_ssl.c:1000)"
# Silence the middleware's own error logging; this probe prints its own status.
logging.getLogger("deerflow.agents.middlewares.tool_error_handling_middleware").setLevel(logging.CRITICAL)
def _make_ssl_error():
    # Wrap the SSLEOFError the way requests surfaces it to tool code.
    return SSLError(ssl.SSLEOFError(8, HANDSHAKE_ERROR))
print("[STEP 1] Prepare simulated Tavily SSL handshake failure.")
print(f"[INFO] Handshake error payload: {HANDSHAKE_ERROR}")
# Two sequential tool calls: the first must fail, the second must still succeed.
TOOL_CALLS = [
    {"name": "web_search", "id": "tc-fail", "args": {"query": "latest agent news"}},
    {"name": "web_fetch", "id": "tc-ok", "args": {"url": "https://example.com"}},
]
def _sync_handler(req):
    """Terminal tool executor: ``web_search`` raises, every other tool succeeds."""
    tool_name = req.tool_call.get("name", "unknown_tool")
    if tool_name == "web_search":
        raise _make_ssl_error()
    return ToolMessage(
        content=f"{tool_name} success",
        tool_call_id=req.tool_call.get("id", "missing-id"),
        name=tool_name,
        status="success",
    )
async def _async_handler(req):
    """Async twin of ``_sync_handler``."""
    tool_name = req.tool_call.get("name", "unknown_tool")
    if tool_name == "web_search":
        raise _make_ssl_error()
    return ToolMessage(
        content=f"{tool_name} success",
        tool_call_id=req.tool_call.get("id", "missing-id"),
        name=tool_name,
        status="success",
    )
def _collect_sync_wrappers(middlewares):
    # A middleware participates in the sync chain when its class overrides
    # either tool-call hook; async-only middlewares still contribute their
    # inherited sync wrapper.
    return [
        m.wrap_tool_call
        for m in middlewares
        if m.__class__.wrap_tool_call is not AgentMiddleware.wrap_tool_call
        or m.__class__.awrap_tool_call is not AgentMiddleware.awrap_tool_call
    ]
def _collect_async_wrappers(middlewares):
    # Mirror of _collect_sync_wrappers for the async chain.
    return [
        m.awrap_tool_call
        for m in middlewares
        if m.__class__.awrap_tool_call is not AgentMiddleware.awrap_tool_call
        or m.__class__.wrap_tool_call is not AgentMiddleware.wrap_tool_call
    ]
def _compose_sync(wrappers):
    # Fold the wrappers around the terminal handler so wrappers[0] becomes the
    # outermost layer, matching how the agent runtime nests middleware.
    def execute(req):
        return _sync_handler(req)
    for wrapper in reversed(wrappers):
        previous = execute
        # Default-argument binding freezes wrapper/previous per iteration
        # (avoids the classic late-binding closure bug).
        def execute(req, wrapper=wrapper, previous=previous):
            return wrapper(req, previous)
    return execute
def _compose_async(wrappers):
    # Async twin of _compose_sync.
    async def execute(req):
        return await _async_handler(req)
    for wrapper in reversed(wrappers):
        previous = execute
        async def execute(req, wrapper=wrapper, previous=previous):
            return await wrapper(req, previous)
    return execute
def _validate_outputs(label, outputs):
    # Assert the downgrade contract: exactly two ToolMessages, the first
    # status=error carrying the downgrade text, the second status=success and
    # intact. Each violation exits with a distinct code so CI logs pinpoint
    # what regressed.
    if len(outputs) != 2:
        print(f"[FAIL] {label}: expected 2 tool outputs, got {len(outputs)}")
        raise SystemExit(2)
    first, second = outputs
    if not isinstance(first, ToolMessage) or not isinstance(second, ToolMessage):
        print(f"[FAIL] {label}: outputs are not ToolMessage instances")
        raise SystemExit(3)
    if first.status != "error":
        print(f"[FAIL] {label}: first tool should be status=error, got {first.status}")
        raise SystemExit(4)
    if second.status != "success":
        print(f"[FAIL] {label}: second tool should be status=success, got {second.status}")
        raise SystemExit(5)
    if "Error: Tool 'web_search' failed" not in first.text:
        print(f"[FAIL] {label}: first tool error text missing")
        raise SystemExit(6)
    if "web_fetch success" not in second.text:
        print(f"[FAIL] {label}: second tool success text missing")
        raise SystemExit(7)
    print(f"[INFO] {label}: no crash, outputs preserved (error + success).")
def _build_sub_middlewares():
    # Prefer the branch's canonical subagent chain builder; branches without
    # the error-handling module fall back to a minimal lazy chain.
    try:
        from deerflow.agents.middlewares.tool_error_handling_middleware import build_subagent_runtime_middlewares
    except Exception:
        return [
            ThreadDataMiddleware(lazy_init=True),
            SandboxMiddleware(lazy_init=True),
        ]
    return build_subagent_runtime_middlewares()
def _run_sync_sequence(executor):
    # Run TOOL_CALLS in order; return (outputs gathered so far, exception or
    # None). A returned exception means the chain let the failure escape.
    outputs = []
    try:
        for call in TOOL_CALLS:
            req = SimpleNamespace(tool_call=call)
            outputs.append(executor(req))
    except Exception as exc:
        return outputs, exc
    return outputs, None
async def _run_async_sequence(executor):
    # Async twin of _run_sync_sequence.
    outputs = []
    try:
        for call in TOOL_CALLS:
            req = SimpleNamespace(tool_call=call)
            outputs.append(await executor(req))
    except Exception as exc:
        return outputs, exc
    return outputs, None
print("[STEP 2] Load current branch middleware chains.")
app_cfg = get_app_config()
model_name = app_cfg.models[0].name if app_cfg.models else None
if not model_name:
    print("[FAIL] No model configured; cannot evaluate lead middleware chain.")
    raise SystemExit(8)
lead_middlewares = _build_middlewares({"configurable": {}}, model_name=model_name)
sub_middlewares = _build_sub_middlewares()
print("[STEP 3] Simulate two sequential tool calls and check whether conversation flow aborts.")
any_crash = False
# Probe both chains (lead agent and subagent), each in sync and async form.
for label, middlewares in [("lead", lead_middlewares), ("subagent", sub_middlewares)]:
    sync_exec = _compose_sync(_collect_sync_wrappers(middlewares))
    sync_outputs, sync_exc = _run_sync_sequence(sync_exec)
    if sync_exc is not None:
        any_crash = True
        print(f"[INFO] {label}/sync: conversation aborted after tool error ({sync_exc.__class__.__name__}: {sync_exc}).")
    else:
        _validate_outputs(f"{label}/sync", sync_outputs)
    async_exec = _compose_async(_collect_async_wrappers(middlewares))
    async_outputs, async_exc = asyncio.run(_run_async_sequence(async_exec))
    if async_exc is not None:
        any_crash = True
        print(f"[INFO] {label}/async: conversation aborted after tool error ({async_exc.__class__.__name__}: {async_exc}).")
    else:
        _validate_outputs(f"{label}/async", async_outputs)
# Any escaped exception means the downgrade middleware is missing/ineffective.
if any_crash:
    print("[FAIL] Tool exception caused conversation flow to abort (no effective downgrade).")
    raise SystemExit(9)
print("[PASS] Tool exceptions were downgraded; conversation flow continued with remaining tool results.")
PY

View File

@@ -0,0 +1,61 @@
#!/usr/bin/env bash
#
# wait-for-port.sh - Wait for a TCP port to become available
#
# Usage: ./scripts/wait-for-port.sh <port> [timeout_seconds] [service_name]
#
# Arguments:
# port - TCP port to wait for (required)
# timeout_seconds - Max seconds to wait (default: 60)
# service_name - Display name for messages (default: "Service")
#
# Exit codes:
# 0 - Port is listening
# 1 - Timed out waiting
PORT="${1:?Usage: wait-for-port.sh <port> [timeout] [service_name]}"
TIMEOUT="${2:-60}"
SERVICE="${3:-Service}"
elapsed=0
interval=1
is_port_listening() {
    # Probe with whichever tool exists: lsof, then ss, then netstat, and as a
    # last resort a raw /dev/tcp connect (requires coreutils `timeout`).
    if command -v lsof >/dev/null 2>&1; then
        if lsof -nP -iTCP:"$PORT" -sTCP:LISTEN -t >/dev/null 2>&1; then
            return 0
        fi
    fi
    if command -v ss >/dev/null 2>&1; then
        if ss -ltn "( sport = :$PORT )" 2>/dev/null | tail -n +2 | grep -q .; then
            return 0
        fi
    fi
    if command -v netstat >/dev/null 2>&1; then
        # Match "…:PORT" or "….PORT" (BSD/macOS netstat separates with a dot).
        if netstat -ltn 2>/dev/null | awk '{print $4}' | grep -Eq "(^|[.:])${PORT}$"; then
            return 0
        fi
    fi
    if command -v timeout >/dev/null 2>&1; then
        timeout 1 bash -c "exec 3<>/dev/tcp/127.0.0.1/$PORT" >/dev/null 2>&1
        return $?
    fi
    return 1
}
while ! is_port_listening; do
    if [ "$elapsed" -ge "$TIMEOUT" ]; then
        echo ""
        echo "$SERVICE failed to start on port $PORT after ${TIMEOUT}s"
        exit 1
    fi
    printf "\r Waiting for %s on port %s... %ds" "$SERVICE" "$PORT" "$elapsed"
    sleep "$interval"
    elapsed=$((elapsed + interval))
done
printf "\r %-60s\r" "" # clear the waiting line

View File

@@ -0,0 +1 @@
# DeerFlow Setup Wizard package

View File

@@ -0,0 +1,251 @@
"""LLM and search provider definitions for the Setup Wizard."""
from __future__ import annotations
from dataclasses import dataclass, field
@dataclass
class LLMProvider:
    """Catalog entry describing one selectable LLM backend."""

    name: str  # stable internal id used for branching in the wizard
    display_name: str  # shown to the user in the choice menu
    description: str  # short blurb rendered next to display_name
    use: str  # "module:Class" path written into config.yaml
    models: list[str]  # selectable model names offered in the menu
    default_model: str  # preselected model for this provider
    env_var: str | None  # env var holding the API key; None = no key prompt
    package: str | None  # pip package backing `use` — presumably consumed by the installer; TODO confirm
    # Optional: some providers use a different field name for the API key in YAML
    api_key_field: str = "api_key"
    # Extra config fields beyond the common ones (merged into YAML)
    extra_config: dict = field(default_factory=dict)
    auth_hint: str | None = None  # when set, printed instead of prompting for a key
@dataclass
class WebProvider:
    """Catalog entry for a web-fetch (page reader) backend."""

    name: str  # stable internal id
    display_name: str  # shown in the wizard menu
    description: str  # short blurb rendered next to display_name
    use: str  # "module:tool" path written into config.yaml
    env_var: str | None # None = no API key required
    tool_name: str  # tool name exposed in the config (e.g. "web_fetch")
    extra_config: dict = field(default_factory=dict)  # provider-specific YAML extras
@dataclass
class SearchProvider:
    """Catalog entry for a web-search backend."""

    name: str  # stable internal id
    display_name: str  # shown in the wizard menu
    description: str  # short blurb rendered next to display_name
    use: str  # "module:tool" path written into config.yaml
    env_var: str | None # None = no API key required
    tool_name: str = "web_search"  # tool name exposed in the config
    extra_config: dict = field(default_factory=dict)  # provider-specific YAML extras
# Catalog of selectable LLM providers, in the order shown by the wizard menu.
LLM_PROVIDERS: list[LLMProvider] = [
    LLMProvider(
        name="openai",
        display_name="OpenAI",
        description="GPT-4o, GPT-4.1, o3",
        use="langchain_openai:ChatOpenAI",
        models=["gpt-4o", "gpt-4.1", "o3"],
        default_model="gpt-4o",
        env_var="OPENAI_API_KEY",
        package="langchain-openai",
    ),
    LLMProvider(
        name="anthropic",
        display_name="Anthropic",
        # NOTE(review): description says "Opus 4, Sonnet 4" but the model ids
        # below are *-4-5 — confirm which copy is intended.
        description="Claude Opus 4, Sonnet 4",
        use="langchain_anthropic:ChatAnthropic",
        models=["claude-opus-4-5", "claude-sonnet-4-5"],
        default_model="claude-sonnet-4-5",
        env_var="ANTHROPIC_API_KEY",
        package="langchain-anthropic",
        extra_config={"max_tokens": 8192},
    ),
    LLMProvider(
        name="deepseek",
        display_name="DeepSeek",
        description="V3, R1",
        use="langchain_deepseek:ChatDeepSeek",
        models=["deepseek-chat", "deepseek-reasoner"],
        default_model="deepseek-chat",
        env_var="DEEPSEEK_API_KEY",
        package="langchain-deepseek",
    ),
    LLMProvider(
        name="google",
        display_name="Google Gemini",
        description="2.0 Flash, 2.5 Pro",
        use="langchain_google_genai:ChatGoogleGenerativeAI",
        models=["gemini-2.0-flash", "gemini-2.5-pro"],
        default_model="gemini-2.0-flash",
        env_var="GEMINI_API_KEY",
        package="langchain-google-genai",
        # Gemini stores its key under a provider-specific YAML field.
        api_key_field="gemini_api_key",
    ),
    LLMProvider(
        name="openrouter",
        display_name="OpenRouter",
        description="OpenAI-compatible gateway with broad model catalog",
        use="langchain_openai:ChatOpenAI",
        models=["google/gemini-2.5-flash-preview", "openai/gpt-5-mini", "anthropic/claude-sonnet-4"],
        default_model="google/gemini-2.5-flash-preview",
        env_var="OPENROUTER_API_KEY",
        package="langchain-openai",
        extra_config={
            "base_url": "https://openrouter.ai/api/v1",
            "request_timeout": 600.0,
            "max_retries": 2,
            "max_tokens": 8192,
            "temperature": 0.7,
        },
    ),
    LLMProvider(
        name="vllm",
        display_name="vLLM",
        description="Self-hosted OpenAI-compatible serving",
        use="deerflow.models.vllm_provider:VllmChatModel",
        models=["Qwen/Qwen3-32B", "Qwen/Qwen2.5-Coder-32B-Instruct"],
        default_model="Qwen/Qwen3-32B",
        env_var="VLLM_API_KEY",
        package=None,
        extra_config={
            "base_url": "http://localhost:8000/v1",
            "request_timeout": 600.0,
            "max_retries": 2,
            "max_tokens": 8192,
            "supports_thinking": True,
            "supports_vision": False,
            # Extra request body sent only when thinking mode is enabled.
            "when_thinking_enabled": {
                "extra_body": {
                    "chat_template_kwargs": {
                        "enable_thinking": True,
                    }
                }
            },
        },
    ),
    LLMProvider(
        name="codex",
        display_name="Codex CLI",
        description="Uses Codex CLI local auth (~/.codex/auth.json)",
        use="deerflow.models.openai_codex_provider:CodexChatModel",
        models=["gpt-5.4", "gpt-5-mini"],
        default_model="gpt-5.4",
        env_var=None,
        package=None,
        api_key_field="api_key",
        extra_config={"supports_thinking": True, "supports_reasoning_effort": True},
        auth_hint="Uses existing Codex CLI auth from ~/.codex/auth.json",
    ),
    LLMProvider(
        name="claude_code",
        display_name="Claude Code OAuth",
        description="Uses Claude Code local OAuth credentials",
        use="deerflow.models.claude_provider:ClaudeChatModel",
        models=["claude-sonnet-4-6", "claude-opus-4-1"],
        default_model="claude-sonnet-4-6",
        env_var=None,
        package=None,
        extra_config={"max_tokens": 4096, "supports_thinking": True},
        auth_hint="Uses Claude Code OAuth credentials from your local machine",
    ),
    LLMProvider(
        name="other",
        display_name="Other OpenAI-compatible",
        description="Custom gateway with base_url and model name",
        use="langchain_openai:ChatOpenAI",
        models=["gpt-4o"],
        default_model="gpt-4o",
        env_var="OPENAI_API_KEY",
        package="langchain-openai",
    ),
]
# Web-search backends offered by the wizard.
#
# NOTE(review): in this hardened tree the native community search providers
# (ddg_search, tavily, infoquest, exa, firecrawl) are replaced by stubs that
# raise NativeWebToolDisabledError at import time, so a config generated from
# these entries will fail to load. Presumably this list should offer the
# searx-backed tools instead — confirm against deerflow.community.searx.
SEARCH_PROVIDERS: list[SearchProvider] = [
    SearchProvider(
        name="ddg",
        display_name="DuckDuckGo (free, no key needed)",
        description="No API key required",
        use="deerflow.community.ddg_search.tools:web_search_tool",
        env_var=None,
        extra_config={"max_results": 5},
    ),
    SearchProvider(
        name="tavily",
        display_name="Tavily",
        description="Recommended, free tier available",
        use="deerflow.community.tavily.tools:web_search_tool",
        env_var="TAVILY_API_KEY",
        extra_config={"max_results": 5},
    ),
    SearchProvider(
        name="infoquest",
        display_name="InfoQuest",
        description="Higher quality vertical search, API key required",
        use="deerflow.community.infoquest.tools:web_search_tool",
        env_var="INFOQUEST_API_KEY",
        extra_config={"search_time_range": 10},
    ),
    SearchProvider(
        name="exa",
        display_name="Exa",
        description="Neural + keyword web search, API key required",
        use="deerflow.community.exa.tools:web_search_tool",
        env_var="EXA_API_KEY",
        extra_config={
            "max_results": 5,
            "search_type": "auto",
            "contents_max_characters": 1000,
        },
    ),
    SearchProvider(
        name="firecrawl",
        display_name="Firecrawl",
        description="Search + crawl via Firecrawl API",
        use="deerflow.community.firecrawl.tools:web_search_tool",
        env_var="FIRECRAWL_API_KEY",
        extra_config={"max_results": 5},
    ),
]
# Web-fetch (page reader) backends offered by the wizard.
#
# NOTE(review): same hardening caveat as SEARCH_PROVIDERS — the native
# jina_ai/exa/infoquest/firecrawl tool modules are hard-fail stubs in this
# tree; confirm whether these entries should point at the searx-backed
# web_fetch instead.
WEB_FETCH_PROVIDERS: list[WebProvider] = [
    WebProvider(
        name="jina_ai",
        display_name="Jina AI Reader",
        description="Good default reader, no API key required",
        use="deerflow.community.jina_ai.tools:web_fetch_tool",
        env_var=None,
        tool_name="web_fetch",
        extra_config={"timeout": 10},
    ),
    WebProvider(
        name="exa",
        display_name="Exa",
        description="API key required",
        use="deerflow.community.exa.tools:web_fetch_tool",
        env_var="EXA_API_KEY",
        tool_name="web_fetch",
    ),
    WebProvider(
        name="infoquest",
        display_name="InfoQuest",
        description="API key required",
        use="deerflow.community.infoquest.tools:web_fetch_tool",
        env_var="INFOQUEST_API_KEY",
        tool_name="web_fetch",
        extra_config={"timeout": 10, "fetch_time": 10, "navigation_timeout": 30},
    ),
    WebProvider(
        name="firecrawl",
        display_name="Firecrawl",
        description="Search-grade crawl with markdown output, API key required",
        use="deerflow.community.firecrawl.tools:web_fetch_tool",
        env_var="FIRECRAWL_API_KEY",
        tool_name="web_fetch",
    ),
]

View File

@@ -0,0 +1 @@
# Setup Wizard steps

View File

@@ -0,0 +1,51 @@
"""Step: execution mode and safety-related capabilities."""
from __future__ import annotations
from dataclasses import dataclass
from wizard.ui import ask_choice, ask_yes_no, print_header, print_info, print_warning
# `use` paths written into config.yaml for the two sandbox modes.
LOCAL_SANDBOX = "deerflow.sandbox.local:LocalSandboxProvider"
CONTAINER_SANDBOX = "deerflow.community.aio_sandbox:AioSandboxProvider"
@dataclass
class ExecutionStepResult:
    """User choices collected by the execution & safety step."""

    sandbox_use: str  # LOCAL_SANDBOX or CONTAINER_SANDBOX provider path
    allow_host_bash: bool  # True only when local sandbox AND bash enabled
    include_bash_tool: bool  # whether the bash tool is exposed at all
    include_write_tools: bool  # whether write_file / str_replace are exposed
def run_execution_step(step_label: str = "Step 3/4") -> ExecutionStepResult:
    """Ask the user for sandbox mode and execution-related capabilities.

    Host bash is only allowed when the local sandbox is selected AND the
    user explicitly enabled the bash tool.
    """
    print_header(f"{step_label} · Execution & Safety")
    print_info("Choose how much execution power DeerFlow should have in this workspace.")
    choice = ask_choice(
        "Execution mode",
        [
            "Local sandbox — fastest, uses host filesystem paths",
            "Container sandbox — more isolated, requires Docker or Apple Container",
        ],
        default=0,
    )
    use_local = choice == 0
    sandbox_use = LOCAL_SANDBOX if use_local else CONTAINER_SANDBOX
    print()
    if use_local:
        # Warn loudly: the local sandbox shares the host filesystem.
        print_warning(
            "Local sandbox is convenient but not a secure shell isolation boundary."
        )
        print_info("Keep host bash disabled unless this is a fully trusted local workflow.")
    else:
        print_info("Container sandbox isolates shell execution better than host-local mode.")
    bash_enabled = ask_yes_no("Enable bash command execution?", default=False)
    write_enabled = ask_yes_no(
        "Enable file write tools (write_file, str_replace)?", default=True
    )
    return ExecutionStepResult(
        sandbox_use=sandbox_use,
        allow_host_bash=use_local and bash_enabled,
        include_bash_tool=bash_enabled,
        include_write_tools=write_enabled,
    )

View File

@@ -0,0 +1,76 @@
"""Step 1: LLM provider selection."""
from __future__ import annotations
from dataclasses import dataclass
from wizard.providers import LLM_PROVIDERS, LLMProvider
from wizard.ui import (
ask_choice,
ask_secret,
ask_text,
print_header,
print_info,
print_success,
)
@dataclass
class LLMStepResult:
    """Outcome of the LLM provider selection step."""

    provider: LLMProvider  # chosen catalog entry
    model_name: str  # resolved model (user-typed for the "other" provider)
    api_key: str | None  # None when provider uses local auth or no key was entered
    base_url: str | None = None  # set for openrouter/vllm defaults or "other" input
def run_llm_step(step_label: str = "Step 1/3") -> LLMStepResult:
    """Interactive wizard step: pick an LLM provider, model, and credentials.

    Flow:
      1. choose a provider from LLM_PROVIDERS;
      2. choose a model (skipped when the provider lists exactly one);
      3. the "other" provider additionally asks for a base URL + model name;
      4. providers with an auth_hint print the hint and return early with
         api_key=None (no key is collected here);
      5. otherwise the provider's API key is prompted for.
    """
    print_header(f"{step_label} · Choose your LLM provider")
    options = [f"{p.display_name} ({p.description})" for p in LLM_PROVIDERS]
    idx = ask_choice("Enter choice", options)
    provider = LLM_PROVIDERS[idx]
    print()
    # Model selection (show list, default to first)
    if len(provider.models) > 1:
        print_info(f"Available models for {provider.display_name}:")
        model_idx = ask_choice("Select model", provider.models, default=0)
        model_name = provider.models[model_idx]
    else:
        model_name = provider.models[0]
    print()
    base_url: str | None = None
    if provider.name in {"openrouter", "vllm"}:
        # These providers carry a fixed endpoint in their extra_config.
        base_url = provider.extra_config.get("base_url")
    if provider.name == "other":
        # Custom provider: both endpoint and model come from the user.
        print_header(f"{step_label} · Connection details")
        base_url = ask_text("Base URL (e.g. https://api.openai.com/v1)", required=True)
        model_name = ask_text("Model name", default=provider.default_model)
    elif provider.auth_hint:
        # Auth-hint providers explain authentication instead of collecting a
        # key; short-circuit with api_key=None.
        print_header(f"{step_label} · Authentication")
        print_info(provider.auth_hint)
        api_key = None
        return LLMStepResult(
            provider=provider,
            model_name=model_name,
            api_key=api_key,
            base_url=base_url,
        )
    print_header(f"{step_label} · Enter your API Key")
    if provider.env_var:
        api_key = ask_secret(f"{provider.env_var}")
    else:
        # No env var declared -> nothing to collect.
        api_key = None
    if api_key:
        print_success(f"Key will be saved to .env as {provider.env_var}")
    return LLMStepResult(
        provider=provider,
        model_name=model_name,
        api_key=api_key,
        base_url=base_url,
    )

View File

@@ -0,0 +1,66 @@
"""Step: Web search configuration."""
from __future__ import annotations
from dataclasses import dataclass
from wizard.providers import SEARCH_PROVIDERS, WEB_FETCH_PROVIDERS, SearchProvider, WebProvider
from wizard.ui import ask_choice, ask_secret, print_header, print_info, print_success
@dataclass
class SearchStepResult:
    """Choices collected by the web search / web fetch wizard step."""

    search_provider: SearchProvider | None  # None = skip
    search_api_key: str | None  # key for the search provider, if it needs one
    fetch_provider: WebProvider | None  # None = skip
    fetch_api_key: str | None  # key for the fetch provider, if it needs one
def run_search_step(step_label: str = "Step 3/3") -> SearchStepResult:
    """Interactive wizard step for the optional web search / fetch providers.

    Both provider lists get a trailing "skip" option. API keys are cached by
    env-var name so a fetch provider sharing the search provider's key does
    not prompt twice.
    """
    print_header(f"{step_label} · Web Search & Fetch (optional)")
    remembered_keys: dict[str, str] = {}

    def _pick(providers, skip_label: str, prompt: str):
        # Render the provider menu plus a skip entry; None means skipped.
        labels = [f"{p.display_name}{p.description}" for p in providers]
        labels.append(skip_label)
        chosen = ask_choice(prompt, labels, default=0)
        return providers[chosen] if chosen < len(providers) else None

    def _collect_key(env_var: str | None) -> str | None:
        # Ask for (or reuse) the API key associated with *env_var*.
        if not env_var:
            return None
        print()
        if env_var in remembered_keys:
            print_info(f"Reusing {env_var} from web search provider")
            return remembered_keys[env_var]
        key = ask_secret(f"{env_var}")
        remembered_keys[env_var] = key
        print_success(f"Key will be saved to .env as {env_var}")
        return key

    search_provider = _pick(
        SEARCH_PROVIDERS,
        "Skip for now (agent still works without web search)",
        "Choose a web search provider",
    )
    search_api_key = _collect_key(search_provider.env_var) if search_provider else None
    print()
    fetch_provider = _pick(
        WEB_FETCH_PROVIDERS,
        "Skip for now (agent can still answer without web fetch)",
        "Choose a web fetch provider",
    )
    fetch_api_key = _collect_key(fetch_provider.env_var) if fetch_provider else None
    return SearchStepResult(
        search_provider=search_provider,
        search_api_key=search_api_key,
        fetch_provider=fetch_provider,
        fetch_api_key=fetch_api_key,
    )

View File

@@ -0,0 +1,261 @@
"""Terminal UI helpers for the Setup Wizard."""
from __future__ import annotations
import getpass
import shutil
import sys
try:
import termios
import tty
except ImportError: # pragma: no cover - non-Unix fallback
termios = None
tty = None
# ── ANSI colours ──────────────────────────────────────────────────────────────
def _supports_color() -> bool:
return hasattr(sys.stdout, "isatty") and sys.stdout.isatty()
def _c(text: str, code: str) -> str:
if _supports_color():
return f"\033[{code}m{text}\033[0m"
return text
def green(text: str) -> str:
return _c(text, "32")
def red(text: str) -> str:
return _c(text, "31")
def yellow(text: str) -> str:
return _c(text, "33")
def cyan(text: str) -> str:
return _c(text, "36")
def bold(text: str) -> str:
return _c(text, "1")
def inverse(text: str) -> str:
return _c(text, "7")
# ── UI primitives ─────────────────────────────────────────────────────────────
def print_header(title: str) -> None:
    """Print *title* inside a full-width banner, at least 44 columns wide."""
    # NOTE(review): `bar` multiplies an EMPTY string, so the banner lines are
    # blank — a box-drawing glyph (e.g. "─") and the border characters in the
    # f-strings below appear to have been stripped in transit; confirm
    # against the upstream file.
    width = max(len(title) + 4, 44)
    bar = "" * width
    print()
    print(f"{bar}")
    print(f"{title.ljust(width - 2)}")
    print(f"{bar}")
    print()
def print_section(title: str) -> None:
    """Print a bold section divider with blank lines around it."""
    print()
    print(bold(f"── {title} ──"))
    print()


def print_success(message: str) -> None:
    """Print *message* with a green status marker."""
    # NOTE(review): green('') wraps an empty string — a "✓"-style glyph was
    # likely stripped in transit; confirm against upstream.
    print(f" {green('')} {message}")


def print_warning(message: str) -> None:
    """Print *message* with a yellow '!' marker."""
    print(f" {yellow('!')} {message}")


def print_error(message: str) -> None:
    """Print *message* with a red status marker."""
    # NOTE(review): red('') wraps an empty string — glyph likely stripped.
    print(f" {red('')} {message}")


def print_info(message: str) -> None:
    """Print *message* with a cyan status marker."""
    # NOTE(review): cyan('') wraps an empty string — glyph likely stripped.
    print(f" {cyan('')} {message}")
def _ask_choice_with_numbers(prompt: str, options: list[str], default: int | None = None) -> int:
    """Numbered-menu fallback for terminals without raw-mode key input.

    Prints every option with its 1-based number, then loops until the user
    enters a valid number (or just Enter when a default exists). Returns the
    0-based index of the chosen option.
    """
    has_default = default is not None
    for number, label in enumerate(options, start=1):
        is_default = has_default and number - 1 == default
        marker = f" {green('*')}" if is_default else " "
        print(f"{marker} {number}. {label}")
    print()
    # The suffix never changes inside the loop, so build it once.
    suffix = f" [{default + 1}]" if has_default else ""
    while True:
        answer = input(f"{prompt}{suffix}: ").strip()
        if not answer and has_default:
            return default
        if answer.isdigit():
            chosen = int(answer) - 1
            if 0 <= chosen < len(options):
                return chosen
        print(f" Please enter a number between 1 and {len(options)}.")
def _supports_arrow_menu() -> bool:
return (
termios is not None
and tty is not None
and hasattr(sys.stdin, "isatty")
and hasattr(sys.stdout, "isatty")
and sys.stdin.isatty()
and sys.stdout.isatty()
and sys.stderr.isatty()
)
def _clear_rendered_lines(count: int) -> None:
if count <= 0:
return
sys.stdout.write("\x1b[2K\r")
for _ in range(count):
sys.stdout.write("\x1b[1A\x1b[2K\r")
def _read_key(fd: int) -> str:
    """Read one keypress from stdin, decoding CSI arrow sequences.

    Returns a single character, or a three-character "\\x1b[X" string for
    CSI sequences such as the arrow keys.

    Note: *fd* is accepted for symmetry with the termios calls in the
    caller but is not used here — reads go through sys.stdin.
    """
    first = sys.stdin.read(1)
    if first != "\x1b":
        return first
    second = sys.stdin.read(1)
    if second != "[":
        # NOTE(review): *second* is discarded on this path, so a non-CSI
        # escape sequence (e.g. Alt+key) loses one character — confirm this
        # is intended.
        return first
    third = sys.stdin.read(1)
    return f"\x1b[{third}"
def _terminal_width() -> int:
return max(shutil.get_terminal_size(fallback=(80, 24)).columns, 40)
def _truncate_line(text: str, max_width: int) -> str:
    """Clamp *text* to fit within *max_width* characters.

    NOTE(review): the truncating branch returns max_width - 1 characters
    with nothing appended — a trailing ellipsis glyph ("…") appears to have
    been stripped from the f-string in transit; confirm against upstream.
    """
    if len(text) <= max_width:
        return text
    if max_width <= 1:
        # Degenerate widths: plain hard cut, no room for an ellipsis.
        return text[:max_width]
    return f"{text[: max_width - 1]}"
def _render_choice_menu(options: list[str], selected: int) -> int:
    """Print the arrow-key menu and return the number of lines rendered.

    The caller keeps the return value so it can clear exactly that many
    lines before the next redraw.
    """
    # Right-align option numbers so " 9." and "10." line up.
    number_width = len(str(len(options)))
    menu_width = _terminal_width()
    # Leave room for the pointer column on the left.
    content_width = max(menu_width - 3, 20)
    for i, opt in enumerate(options, 1):
        line = _truncate_line(f"{i:>{number_width}}. {opt}", content_width)
        if i - 1 == selected:
            # Highlighted row: inverse-video + bold.
            # NOTE(review): green('') renders an empty pointer — a glyph such
            # as "❯" looks to have been stripped in transit; confirm.
            print(f"{green('')} {inverse(bold(line))}")
        else:
            print(f" {line}")
    sys.stdout.flush()
    return len(options)
def _ask_choice_with_arrows(prompt: str, options: list[str], default: int | None = None) -> int:
    """Raw-mode menu: ↑/↓ to move, Enter to confirm, digits to quick-select.

    Returns the 0-based index of the chosen option. The terminal is put into
    cbreak mode and the cursor hidden for the duration; both are restored in
    the finally block even on KeyboardInterrupt.
    """
    selected = default if default is not None else 0
    typed = ""  # digit buffer for number quick-select
    fd = sys.stdin.fileno()
    original_settings = termios.tcgetattr(fd)
    rendered_lines = 0
    try:
        sys.stdout.write("\x1b[?25l")  # hide the cursor while the menu runs
        sys.stdout.flush()
        tty.setcbreak(fd)
        prompt_help = f"{prompt} (↑/↓ move, Enter confirm, number quick-select)"
        print(cyan(_truncate_line(prompt_help, max(_terminal_width() - 2, 20))))
        while True:
            # Redraw the menu in place on every keypress.
            if rendered_lines:
                _clear_rendered_lines(rendered_lines)
            rendered_lines = _render_choice_menu(options, selected)
            key = _read_key(fd)
            if key == "\x03":
                # Ctrl-C arrives as a plain character in cbreak mode.
                raise KeyboardInterrupt
            if key in ("\r", "\n"):
                # Enter: a pending digit buffer overrides the highlight
                # (ignored when out of range).
                if typed:
                    idx = int(typed) - 1
                    if 0 <= idx < len(options):
                        selected = idx
                    typed = ""
                break
            if key == "\x1b[A":  # up arrow, wraps around
                selected = (selected - 1) % len(options)
                typed = ""
                continue
            if key == "\x1b[B":  # down arrow, wraps around
                selected = (selected + 1) % len(options)
                typed = ""
                continue
            if key in ("\x7f", "\b"):  # backspace edits the digit buffer
                typed = typed[:-1]
                continue
            if key.isdigit():
                typed += key
                continue
        # Replace the menu with a one-line summary of the final choice.
        if rendered_lines:
            _clear_rendered_lines(rendered_lines)
        print(f"{prompt}: {options[selected]}")
        return selected
    finally:
        # Always restore terminal mode and cursor visibility.
        termios.tcsetattr(fd, termios.TCSADRAIN, original_settings)
        sys.stdout.write("\x1b[?25h")
        sys.stdout.flush()
def ask_choice(prompt: str, options: list[str], default: int | None = None) -> int:
    """Present a menu and return the 0-based index of the selected option.

    Uses the raw-mode arrow menu when the terminal supports it, otherwise
    falls back to the numbered menu.
    """
    picker = _ask_choice_with_arrows if _supports_arrow_menu() else _ask_choice_with_numbers
    return picker(prompt, options, default=default)
def ask_text(prompt: str, default: str = "", required: bool = False) -> str:
    """Ask for a text value, returning *default* when the user presses Enter.

    An empty answer returns "" when the field is optional, and re-prompts
    when *required* is True and no default exists.
    """
    # The hint never changes inside the loop, so build it once.
    hint = f" [{default}]" if default else ""
    while True:
        answer = input(f"{prompt}{hint}: ").strip()
        if answer:
            return answer
        if default:
            return default
        if not required:
            return ""
        print(" This field is required.")
def ask_secret(prompt: str) -> str:
    """Prompt for a secret with hidden input, retrying until it is non-empty."""
    while True:
        entered = getpass.getpass(f"{prompt}: ").strip()
        if entered:
            return entered
        print(" API key cannot be empty.")
def ask_yes_no(prompt: str, default: bool = True) -> bool:
    """Ask a yes/no question; Enter accepts the default.

    The capitalised letter in the suffix shows which answer applies on a
    bare Enter — the previous static "[Y/N]" suffix never told the user
    what the default actually was.
    """
    suffix = "[Y/n]" if default else "[y/N]"
    while True:
        raw = input(f"{prompt} {suffix}: ").strip().lower()
        if raw == "":
            return default
        if raw in ("y", "yes"):
            return True
        if raw in ("n", "no"):
            return False
        print(" Please enter y or n.")

View File

@@ -0,0 +1,290 @@
"""Config file writer for the Setup Wizard.
Writes config.yaml as a minimal working configuration and updates .env
without wiping existing user customisations where possible.
"""
from __future__ import annotations
from copy import deepcopy
from pathlib import Path
from typing import Any
import yaml
def _project_root() -> Path:
return Path(__file__).resolve().parents[2]
# ── .env helpers ──────────────────────────────────────────────────────────────
def read_env_file(env_path: Path) -> dict[str, str]:
    """Parse a .env file into a dict.

    Comment lines, blank lines, and lines without "=" are skipped; keys and
    values are stripped of surrounding whitespace. A missing file yields an
    empty dict.
    """
    pairs: dict[str, str] = {}
    if not env_path.exists():
        return pairs
    for raw_line in env_path.read_text(encoding="utf-8").splitlines():
        entry = raw_line.strip()
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        key, _, value = entry.partition("=")
        pairs[key.strip()] = value.strip()
    return pairs
def write_env_file(env_path: Path, pairs: dict[str, str]) -> None:
    """Merge *pairs* into an existing (or new) .env file.

    Keys already present are rewritten in place; keys not found are appended
    at the end. Comment lines, blank lines, and other formatting are left
    untouched.
    """
    existing = env_path.read_text(encoding="utf-8").splitlines() if env_path.exists() else []
    seen: set[str] = set()
    output: list[str] = []
    for raw_line in existing:
        entry = raw_line.strip()
        is_assignment = bool(entry) and not entry.startswith("#") and "=" in entry
        if is_assignment:
            key = entry.split("=", 1)[0].strip()
            if key in pairs:
                # Rewrite the assignment in place with the new value.
                output.append(f"{key}={pairs[key]}")
                seen.add(key)
                continue
        output.append(raw_line)
    # Append any keys that were not present in the original file.
    output.extend(f"{key}={value}" for key, value in pairs.items() if key not in seen)
    env_path.write_text("\n".join(output) + "\n", encoding="utf-8")
# ── config.yaml helpers ───────────────────────────────────────────────────────
def _yaml_dump(data: Any) -> str:
    """Serialize *data* as block-style YAML, preserving key order and Unicode."""
    return yaml.safe_dump(data, default_flow_style=False, allow_unicode=True, sort_keys=False)
def _default_tools() -> list[dict[str, Any]]:
return [
{"name": "image_search", "use": "deerflow.community.image_search.tools:image_search_tool", "group": "web", "max_results": 5},
{"name": "ls", "use": "deerflow.sandbox.tools:ls_tool", "group": "file:read"},
{"name": "read_file", "use": "deerflow.sandbox.tools:read_file_tool", "group": "file:read"},
{"name": "glob", "use": "deerflow.sandbox.tools:glob_tool", "group": "file:read"},
{"name": "grep", "use": "deerflow.sandbox.tools:grep_tool", "group": "file:read"},
{"name": "write_file", "use": "deerflow.sandbox.tools:write_file_tool", "group": "file:write"},
{"name": "str_replace", "use": "deerflow.sandbox.tools:str_replace_tool", "group": "file:write"},
{"name": "bash", "use": "deerflow.sandbox.tools:bash_tool", "group": "bash"},
]
def _build_tools(
*,
base_tools: list[dict[str, Any]] | None,
search_use: str | None,
search_tool_name: str,
search_extra_config: dict | None,
web_fetch_use: str | None,
web_fetch_tool_name: str,
web_fetch_extra_config: dict | None,
include_bash_tool: bool,
include_write_tools: bool,
) -> list[dict[str, Any]]:
tools = deepcopy(base_tools if base_tools is not None else _default_tools())
tools = [
tool
for tool in tools
if tool.get("name") not in {search_tool_name, web_fetch_tool_name, "write_file", "str_replace", "bash"}
]
web_group = "web"
if search_use:
search_tool: dict[str, Any] = {
"name": search_tool_name,
"use": search_use,
"group": web_group,
}
if search_extra_config:
search_tool.update(search_extra_config)
tools.insert(0, search_tool)
if web_fetch_use:
fetch_tool: dict[str, Any] = {
"name": web_fetch_tool_name,
"use": web_fetch_use,
"group": web_group,
}
if web_fetch_extra_config:
fetch_tool.update(web_fetch_extra_config)
insert_idx = 1 if search_use else 0
tools.insert(insert_idx, fetch_tool)
if include_write_tools:
tools.extend(
[
{"name": "write_file", "use": "deerflow.sandbox.tools:write_file_tool", "group": "file:write"},
{"name": "str_replace", "use": "deerflow.sandbox.tools:str_replace_tool", "group": "file:write"},
]
)
if include_bash_tool:
tools.append({"name": "bash", "use": "deerflow.sandbox.tools:bash_tool", "group": "bash"})
return tools
def _make_model_config_name(model_name: str) -> str:
"""Derive a meaningful config model name from the provider model identifier.
Replaces path separators and dots with hyphens so the result is a clean
YAML-friendly identifier (e.g. "google/gemini-2.5-pro""gemini-2-5-pro",
"gpt-5.4""gpt-5-4", "deepseek-chat""deepseek-chat").
"""
# Take only the last path component for namespaced models (e.g. "org/model-name")
base = model_name.split("/")[-1]
# Replace dots with hyphens so "gpt-5.4" → "gpt-5-4"
return base.replace(".", "-")
def build_minimal_config(
    *,
    provider_use: str,
    model_name: str,
    display_name: str,
    api_key_field: str,
    env_var: str | None,
    extra_model_config: dict | None = None,
    base_url: str | None = None,
    search_use: str | None = None,
    search_tool_name: str = "web_search",
    search_extra_config: dict | None = None,
    web_fetch_use: str | None = None,
    web_fetch_tool_name: str = "web_fetch",
    web_fetch_extra_config: dict | None = None,
    sandbox_use: str = "deerflow.sandbox.local:LocalSandboxProvider",
    allow_host_bash: bool = False,
    include_bash_tool: bool = False,
    include_write_tools: bool = True,
    config_version: int = 5,
    base_config: dict[str, Any] | None = None,
) -> str:
    """Build the content of a minimal config.yaml and return it as a string.

    Merges *base_config* (typically parsed from config.example.yaml) with
    the wizard's choices: a single model entry, a rebuilt tool list, and the
    sandbox section, topped with a generated-file comment header.

    Fix: an explicit *base_url* argument now takes precedence over a
    "base_url" key inside *extra_model_config*. Previously the extra-config
    value stayed in the dict and the update() below silently overwrote the
    explicit argument.
    """
    from datetime import date

    today = date.today().isoformat()
    model_entry: dict[str, Any] = {
        "name": _make_model_config_name(model_name),
        "display_name": display_name,
        "use": provider_use,
        "model": model_name,
    }
    if env_var:
        # Reference the key indirectly so the secret itself lives in .env.
        model_entry[api_key_field] = f"${env_var}"
    extra_model_fields = dict(extra_model_config or {})
    # Always remove base_url from the extras; use it only as a fallback when
    # no explicit base_url argument was supplied.
    extra_base_url = extra_model_fields.pop("base_url", None)
    if not base_url:
        base_url = extra_base_url
    if base_url:
        model_entry["base_url"] = base_url
    if extra_model_fields:
        model_entry.update(extra_model_fields)
    data: dict[str, Any] = deepcopy(base_config or {})
    data["config_version"] = config_version
    data["models"] = [model_entry]
    base_tools = data.get("tools")
    if not isinstance(base_tools, list):
        # Missing or malformed tools section -> fall back to the defaults.
        base_tools = None
    tools = _build_tools(
        base_tools=base_tools,
        search_use=search_use,
        search_tool_name=search_tool_name,
        search_extra_config=search_extra_config,
        web_fetch_use=web_fetch_use,
        web_fetch_tool_name=web_fetch_tool_name,
        web_fetch_extra_config=web_fetch_extra_config,
        include_bash_tool=include_bash_tool,
        include_write_tools=include_write_tools,
    )
    data["tools"] = tools
    sandbox_config = deepcopy(data.get("sandbox") if isinstance(data.get("sandbox"), dict) else {})
    sandbox_config["use"] = sandbox_use
    if sandbox_use == "deerflow.sandbox.local:LocalSandboxProvider":
        sandbox_config["allow_host_bash"] = allow_host_bash
    else:
        # allow_host_bash only applies to the local sandbox provider.
        sandbox_config.pop("allow_host_bash", None)
    data["sandbox"] = sandbox_config
    header = (
        f"# DeerFlow Configuration\n"
        f"# Generated by 'make setup' on {today}\n"
        f"# Run 'make setup' to reconfigure, or edit this file for advanced options.\n"
        f"# Full reference: config.example.yaml\n\n"
    )
    return header + _yaml_dump(data)
def write_config_yaml(
    config_path: Path,
    *,
    provider_use: str,
    model_name: str,
    display_name: str,
    api_key_field: str,
    env_var: str | None,
    extra_model_config: dict | None = None,
    base_url: str | None = None,
    search_use: str | None = None,
    search_tool_name: str = "web_search",
    search_extra_config: dict | None = None,
    web_fetch_use: str | None = None,
    web_fetch_tool_name: str = "web_fetch",
    web_fetch_extra_config: dict | None = None,
    sandbox_use: str = "deerflow.sandbox.local:LocalSandboxProvider",
    allow_host_bash: bool = False,
    include_bash_tool: bool = False,
    include_write_tools: bool = True,
) -> None:
    """Write (or overwrite) config.yaml with a minimal working configuration.

    When a sibling config.example.yaml exists, its config_version and
    top-level defaults seed the generated file; any parse problem falls back
    to a bare config with version 5 (the example file is best-effort input).
    """
    config_version = 5
    example_defaults: dict[str, Any] | None = None
    example_path = config_path.parent / "config.example.yaml"
    if example_path.exists():
        try:
            # Reuse the module-level yaml import — the previous local
            # `import yaml as _yaml` was redundant.
            raw = yaml.safe_load(example_path.read_text(encoding="utf-8")) or {}
            config_version = int(raw.get("config_version", 5))
            example_defaults = raw
        except Exception:
            # Unparseable/odd example file: keep the plain defaults.
            example_defaults = None
    content = build_minimal_config(
        provider_use=provider_use,
        model_name=model_name,
        display_name=display_name,
        api_key_field=api_key_field,
        env_var=env_var,
        extra_model_config=extra_model_config,
        base_url=base_url,
        search_use=search_use,
        search_tool_name=search_tool_name,
        search_extra_config=search_extra_config,
        web_fetch_use=web_fetch_use,
        web_fetch_tool_name=web_fetch_tool_name,
        web_fetch_extra_config=web_fetch_extra_config,
        sandbox_use=sandbox_use,
        allow_host_bash=allow_host_bash,
        include_bash_tool=include_bash_tool,
        include_write_tools=include_write_tools,
        config_version=config_version,
        base_config=example_defaults,
    )
    config_path.write_text(content, encoding="utf-8")