Initial commit: hardened DeerFlow factory

Vendored deer-flow upstream (bytedance/deer-flow) plus prompt-injection
hardening:

- New deerflow.security package: content_delimiter, html_cleaner,
  sanitizer (8 layers — invisible chars, control chars, symbols, NFC,
  PUA, tag chars, horizontal whitespace collapse with newline/tab
  preservation, length cap)
- New deerflow.community.searx package: web_search, web_fetch,
  image_search backed by a private SearX instance, every external
  string sanitized and wrapped in <<<EXTERNAL_UNTRUSTED_CONTENT>>>
  delimiters
- All native community web providers (ddg_search, tavily, exa,
  firecrawl, jina_ai, infoquest, image_search) replaced with hard-fail
  stubs that raise NativeWebToolDisabledError at import time, so a
  misconfigured tool.use path fails loudly rather than silently falling
  back to unsanitized output
- Native client back-doors (jina_client.py, infoquest_client.py)
  stubbed too
- Native-tool tests quarantined under tests/_disabled_native/
  (collect_ignore_glob via local conftest.py)
- Sanitizer Layer 7 fix: only collapse horizontal whitespace, preserve
  newlines and tabs so list/table structure survives
- Hardened runtime config.yaml references only the searx-backed tools
- Factory overlay (backend/) kept in sync with deer-flow tree as a
  reference / source

See HARDENING.md for the full audit trail and verification steps.
This commit is contained in:
2026-04-12 14:23:57 +02:00
commit 6de0bf9f5b
889 changed files with 173052 additions and 0 deletions

View File

@@ -0,0 +1,78 @@
"""Canonical serialization for LangChain / LangGraph objects.
Provides a single source of truth for converting LangChain message
objects, Pydantic models, and LangGraph state dicts into plain
JSON-serialisable Python structures.
Consumers: ``deerflow.runtime.runs.worker`` (SSE publishing) and
``app.gateway.routers.threads`` (REST responses).
"""
from __future__ import annotations
from typing import Any
def serialize_lc_object(obj: Any) -> Any:
    """Recursively serialize a LangChain object to a JSON-serialisable value.

    Handles, in order: ``None``, JSON primitives, dicts, lists/tuples,
    Pydantic v2 models (``model_dump``), Pydantic v1 / older objects
    (``dict``), and finally a ``str()``/``repr()`` fallback.

    Args:
        obj: Any value — message objects, Pydantic models, containers,
            or plain primitives.

    Returns:
        A structure built only from ``dict``, ``list``, ``str``, ``int``,
        ``float``, ``bool``, and ``None``.
    """
    if obj is None:
        return None
    if isinstance(obj, (str, int, float, bool)):
        return obj
    if isinstance(obj, dict):
        return {k: serialize_lc_object(v) for k, v in obj.items()}
    if isinstance(obj, (list, tuple)):
        return [serialize_lc_object(item) for item in obj]
    # Pydantic v2: recurse into the dump so nested non-JSON values
    # (tuples, datetimes, UUIDs, ...) are also reduced to primitives.
    # Returning the raw dump would leak unserialisable values through.
    if hasattr(obj, "model_dump"):
        try:
            return serialize_lc_object(obj.model_dump())
        except Exception:
            pass
    # Pydantic v1 / older objects — same recursive treatment.
    if hasattr(obj, "dict"):
        try:
            return serialize_lc_object(obj.dict())
        except Exception:
            pass
    # Last resort: a readable string beats an unserialisable object.
    try:
        return str(obj)
    except Exception:
        return repr(obj)
def serialize_channel_values(channel_values: dict[str, Any]) -> dict[str, Any]:
    """Serialize channel values, stripping internal LangGraph keys.

    Keys starting with ``__pregel_`` and the ``__interrupt__`` key are
    dropped so the output matches what the LangGraph Platform API returns.
    """
    def _is_internal(name: str) -> bool:
        # Internal bookkeeping channels must not leak into API responses.
        return name.startswith("__pregel_") or name == "__interrupt__"

    return {
        key: serialize_lc_object(value)
        for key, value in channel_values.items()
        if not _is_internal(key)
    }
def serialize_messages_tuple(obj: Any) -> Any:
    """Serialize a messages-mode tuple ``(chunk, metadata)``."""
    # Anything that is not a 2-tuple gets the generic recursive treatment.
    if not (isinstance(obj, tuple) and len(obj) == 2):
        return serialize_lc_object(obj)
    chunk, meta = obj
    # Non-dict metadata is replaced with an empty dict rather than passed on.
    safe_meta = meta if isinstance(meta, dict) else {}
    return [serialize_lc_object(chunk), safe_meta]
def serialize(obj: Any, *, mode: str = "") -> Any:
    """Serialize LangChain objects with mode-specific handling.

    * ``messages`` — obj is ``(message_chunk, metadata_dict)``
    * ``values`` — obj is the full state dict; ``__pregel_*`` keys stripped
    * everything else — recursive ``model_dump()`` / ``dict()`` fallback
    """
    if mode == "messages":
        return serialize_messages_tuple(obj)
    # ``values`` mode only applies its key-stripping to real dicts; any
    # other payload falls through to the generic serializer.
    if mode == "values" and isinstance(obj, dict):
        return serialize_channel_values(obj)
    return serialize_lc_object(obj)