Initial commit: hardened DeerFlow factory

Vendored deer-flow upstream (bytedance/deer-flow) plus prompt-injection
hardening:

- New deerflow.security package: content_delimiter, html_cleaner,
  sanitizer (8 layers — invisible chars, control chars, symbols, NFC,
  PUA, tag chars, horizontal whitespace collapse with newline/tab
  preservation, length cap)
- New deerflow.community.searx package: web_search, web_fetch,
  image_search backed by a private SearX instance, every external
  string sanitized and wrapped in <<<EXTERNAL_UNTRUSTED_CONTENT>>>
  delimiters (wrap-and-sanitize path sketched below)
- All native community web providers (ddg_search, tavily, exa,
  firecrawl, jina_ai, infoquest, image_search) replaced with hard-fail
  stubs that raise NativeWebToolDisabledError at import time, so a
  misconfigured tool.use path fails loudly rather than silently falling
  back to unsanitized output (stub pattern sketched below)
- Native client back-doors (jina_client.py, infoquest_client.py)
  stubbed too
- Native-tool tests quarantined under tests/_disabled_native/
  (collect_ignore_glob via local conftest.py)
- Sanitizer Layer 7 fix: collapse only horizontal whitespace and preserve
  newlines and tabs so list/table structure survives (regex sketched below)
- Hardened runtime config.yaml references only the searx-backed tools
- Factory overlay (backend/) kept in sync with deer-flow tree as a
  reference / source

See HARDENING.md for the full audit trail and verification steps.
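
For orientation, this is roughly what the searx tool output path looks like.
A minimal sketch, assuming a single delimiter token on both sides; the helper
names, the simplified layer set and the length cap are illustrative, not the
actual deerflow.security API:

    import unicodedata

    EXTERNAL_DELIM = "<<<EXTERNAL_UNTRUSTED_CONTENT>>>"

    def sanitize(raw: str, max_len: int = 20_000) -> str:
        # Stand-in for the 8-layer sanitizer: NFC-normalize, drop control,
        # format and private-use characters (keeping \n and \t), cap the length.
        text = unicodedata.normalize("NFC", raw)
        text = "".join(
            ch for ch in text
            if ch in "\n\t" or not unicodedata.category(ch).startswith("C")
        )
        return text[:max_len]

    def wrap_external(raw: str) -> str:
        # Every string returned by web_search / web_fetch / image_search is
        # sanitized first, then fenced so the model can tell it is untrusted.
        return f"{EXTERNAL_DELIM}\n{sanitize(raw)}\n{EXTERNAL_DELIM}"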
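
The hard-fail stubs are import-time failures rather than runtime fallbacks.
A sketch of the pattern, with an illustrative message and the error class
defined inline for brevity (in the tree it may live in a shared module):

    # e.g. deerflow/community/ddg_search.py (stub)
    class NativeWebToolDisabledError(RuntimeError):
        """Native web providers are disabled in the hardened factory."""

    # Raised as soon as the module is imported, so a tool.use entry that still
    # points at a native provider fails loudly instead of returning
    # unsanitized output.
    raise NativeWebToolDisabledError(
        "ddg_search is disabled; use the searx-backed deerflow.community.searx tools."
    )

The test quarantine uses the standard pytest mechanism; assuming the local
conftest sits in tests/:

    # tests/conftest.py
    collect_ignore_glob = ["_disabled_native/*"]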
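
The Layer 7 fix amounts to collapsing runs of horizontal whitespace only.
An illustrative regex, not necessarily the exact one in sanitizer.py:

    import re

    # Whitespace that is not a newline or a tab, in runs of two or more.
    _HORIZONTAL_WS = re.compile(r"[^\S\n\t]{2,}")

    def collapse_horizontal_whitespace(text: str) -> str:
        # Collapse "   " to " " while leaving \n and \t untouched,
        # so list and table structure in fetched pages survives.
        return _HORIZONTAL_WS.sub(" ", text)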
commit 6de0bf9f5b
2026-04-12 14:23:57 +02:00
889 changed files with 173052 additions and 0 deletions


@@ -0,0 +1 @@
# Setup Wizard steps


@@ -0,0 +1,51 @@
"""Step: execution mode and safety-related capabilities."""
from __future__ import annotations
from dataclasses import dataclass
from wizard.ui import ask_choice, ask_yes_no, print_header, print_info, print_warning
LOCAL_SANDBOX = "deerflow.sandbox.local:LocalSandboxProvider"
CONTAINER_SANDBOX = "deerflow.community.aio_sandbox:AioSandboxProvider"
@dataclass
class ExecutionStepResult:
sandbox_use: str
allow_host_bash: bool
include_bash_tool: bool
include_write_tools: bool
def run_execution_step(step_label: str = "Step 3/4") -> ExecutionStepResult:
print_header(f"{step_label} · Execution & Safety")
print_info("Choose how much execution power DeerFlow should have in this workspace.")
options = [
"Local sandbox — fastest, uses host filesystem paths",
"Container sandbox — more isolated, requires Docker or Apple Container",
]
sandbox_idx = ask_choice("Execution mode", options, default=0)
sandbox_use = LOCAL_SANDBOX if sandbox_idx == 0 else CONTAINER_SANDBOX
print()
if sandbox_use == LOCAL_SANDBOX:
print_warning(
"Local sandbox is convenient but not a secure shell isolation boundary."
)
print_info("Keep host bash disabled unless this is a fully trusted local workflow.")
else:
print_info("Container sandbox isolates shell execution better than host-local mode.")
include_bash_tool = ask_yes_no("Enable bash command execution?", default=False)
include_write_tools = ask_yes_no(
"Enable file write tools (write_file, str_replace)?", default=True
)
return ExecutionStepResult(
sandbox_use=sandbox_use,
allow_host_bash=sandbox_use == LOCAL_SANDBOX and include_bash_tool,
include_bash_tool=include_bash_tool,
include_write_tools=include_write_tools,
)


@@ -0,0 +1,76 @@
"""Step 1: LLM provider selection."""
from __future__ import annotations
from dataclasses import dataclass
from wizard.providers import LLM_PROVIDERS, LLMProvider
from wizard.ui import (
ask_choice,
ask_secret,
ask_text,
print_header,
print_info,
print_success,
)
@dataclass
class LLMStepResult:
provider: LLMProvider
model_name: str
api_key: str | None
base_url: str | None = None
def run_llm_step(step_label: str = "Step 1/3") -> LLMStepResult:
print_header(f"{step_label} · Choose your LLM provider")
options = [f"{p.display_name} ({p.description})" for p in LLM_PROVIDERS]
idx = ask_choice("Enter choice", options)
provider = LLM_PROVIDERS[idx]
print()
# Model selection (show list, default to first)
if len(provider.models) > 1:
print_info(f"Available models for {provider.display_name}:")
model_idx = ask_choice("Select model", provider.models, default=0)
model_name = provider.models[model_idx]
else:
model_name = provider.models[0]
print()
base_url: str | None = None
if provider.name in {"openrouter", "vllm"}:
base_url = provider.extra_config.get("base_url")
if provider.name == "other":
print_header(f"{step_label} · Connection details")
base_url = ask_text("Base URL (e.g. https://api.openai.com/v1)", required=True)
model_name = ask_text("Model name", default=provider.default_model)
elif provider.auth_hint:
print_header(f"{step_label} · Authentication")
print_info(provider.auth_hint)
api_key = None
return LLMStepResult(
provider=provider,
model_name=model_name,
api_key=api_key,
base_url=base_url,
)
print_header(f"{step_label} · Enter your API Key")
if provider.env_var:
api_key = ask_secret(f"{provider.env_var}")
else:
api_key = None
if api_key:
print_success(f"Key will be saved to .env as {provider.env_var}")
return LLMStepResult(
provider=provider,
model_name=model_name,
api_key=api_key,
base_url=base_url,
)


@@ -0,0 +1,66 @@
"""Step: Web search configuration."""
from __future__ import annotations
from dataclasses import dataclass
from wizard.providers import SEARCH_PROVIDERS, WEB_FETCH_PROVIDERS, SearchProvider, WebProvider
from wizard.ui import ask_choice, ask_secret, print_header, print_info, print_success
@dataclass
class SearchStepResult:
search_provider: SearchProvider | None # None = skip
search_api_key: str | None
fetch_provider: WebProvider | None # None = skip
fetch_api_key: str | None
def run_search_step(step_label: str = "Step 3/3") -> SearchStepResult:
print_header(f"{step_label} · Web Search & Fetch (optional)")
provided_keys: dict[str, str] = {}
search_options = [f"{p.display_name}{p.description}" for p in SEARCH_PROVIDERS]
search_options.append("Skip for now (agent still works without web search)")
idx = ask_choice("Choose a web search provider", search_options, default=0)
search_provider: SearchProvider | None = None
search_api_key: str | None = None
if idx >= len(SEARCH_PROVIDERS):
search_provider = None
else:
search_provider = SEARCH_PROVIDERS[idx]
if search_provider.env_var:
print()
search_api_key = ask_secret(f"{search_provider.env_var}")
provided_keys[search_provider.env_var] = search_api_key
print_success(f"Key will be saved to .env as {search_provider.env_var}")
print()
fetch_options = [f"{p.display_name}{p.description}" for p in WEB_FETCH_PROVIDERS]
fetch_options.append("Skip for now (agent can still answer without web fetch)")
idx = ask_choice("Choose a web fetch provider", fetch_options, default=0)
fetch_provider: WebProvider | None = None
fetch_api_key: str | None = None
if idx < len(WEB_FETCH_PROVIDERS):
fetch_provider = WEB_FETCH_PROVIDERS[idx]
if fetch_provider.env_var:
if fetch_provider.env_var in provided_keys:
fetch_api_key = provided_keys[fetch_provider.env_var]
print()
print_info(f"Reusing {fetch_provider.env_var} from web search provider")
else:
print()
fetch_api_key = ask_secret(f"{fetch_provider.env_var}")
provided_keys[fetch_provider.env_var] = fetch_api_key
print_success(f"Key will be saved to .env as {fetch_provider.env_var}")
return SearchStepResult(
search_provider=search_provider,
search_api_key=search_api_key,
fetch_provider=fetch_provider,
fetch_api_key=fetch_api_key,
)
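
For context, a wizard runner would chain these steps roughly as follows. A minimal
sketch, assuming module paths under wizard.steps and leaving the final config/.env
rendering (which lives elsewhere in the factory) as a placeholder:

# Hypothetical runner sketch: only the run_*_step functions and their result
# dataclasses come from the files above; paths and the final step are assumed.
from wizard.steps.execution_step import run_execution_step
from wizard.steps.llm_step import run_llm_step
from wizard.steps.search_step import run_search_step


def main() -> None:
    llm = run_llm_step("Step 1/4")
    search = run_search_step("Step 2/4")
    execution = run_execution_step("Step 3/4")

    # Collect the secrets each step gathered; a real runner would write these to .env.
    env: dict[str, str] = {}
    if llm.api_key and llm.provider.env_var:
        env[llm.provider.env_var] = llm.api_key
    if search.search_provider and search.search_api_key:
        env[search.search_provider.env_var] = search.search_api_key
    if search.fetch_provider and search.fetch_api_key:
        env[search.fetch_provider.env_var] = search.fetch_api_key

    # Step 4/4 would render config.yaml from llm, search and execution
    # (model, base_url, sandbox_use, tool toggles); omitted here.
    print(f"{len(env)} secret(s) collected; sandbox: {execution.sandbox_use}")


if __name__ == "__main__":
    main()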