deerflow-factory/deer-flow/scripts/wizard/steps/llm.py
6de0bf9f5b Initial commit: hardened DeerFlow factory
Vendored deer-flow upstream (bytedance/deer-flow) plus prompt-injection
hardening:

- New deerflow.security package: content_delimiter, html_cleaner,
  sanitizer (8 layers: invisible chars, control chars, symbols, NFC,
  PUA, tag chars, horizontal whitespace collapse with newline/tab
  preservation, length cap); a sanitizer sketch follows this list
- New deerflow.community.searx package: web_search, web_fetch,
  image_search backed by a private SearX instance; every external
  string is sanitized and wrapped in <<<EXTERNAL_UNTRUSTED_CONTENT>>>
  delimiters (wrapping sketch below)
- All native community web providers (ddg_search, tavily, exa,
  firecrawl, jina_ai, infoquest, image_search) replaced with hard-fail
  stubs that raise NativeWebToolDisabledError at import time, so a
  misconfigured tool.use path fails loudly rather than silently falling
  back to unsanitized output (stub sketch below)
- Native client back-doors (jina_client.py, infoquest_client.py)
  stubbed too
- Native-tool tests quarantined under tests/_disabled_native/
  (collect_ignore_glob via a local conftest.py; sketch below)
- Sanitizer Layer 7 fix: collapse only horizontal whitespace and
  preserve newlines and tabs so list/table structure survives (see
  Layer 7 in the sanitizer sketch below)
- Hardened runtime config.yaml references only the searx-backed tools
- Factory overlay (backend/) kept in sync with the deer-flow tree as a
  reference source
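
For illustration, a minimal sketch of what an 8-layer pipeline of this
shape can look like. Every name, character range, and the length cap
below are assumptions, not the shipped code; Layer 3's symbol rules are
not described above, so it stays a comment:

    import re
    import unicodedata

    MAX_LEN = 50_000  # assumed cap; the real limit is not stated here

    _INVISIBLE = re.compile(r"[\u200b-\u200f\u2060\ufeff]")     # Layer 1
    _CONTROL = re.compile(r"[\x00-\x08\x0b\x0c\x0e-\x1f\x7f]")  # Layer 2
    _PUA = re.compile(r"[\ue000-\uf8ff]")                       # Layer 5
    _TAG = re.compile(r"[\U000e0000-\U000e007f]")               # Layer 6
    _HORIZ_WS = re.compile(r"[^\S\n\t]+")  # whitespace except \n and \t

    def sanitize(text: str) -> str:
        text = _INVISIBLE.sub("", text)
        text = _CONTROL.sub("", text)
        # Layer 3 (symbol stripping) omitted: its rules are not given here.
        text = unicodedata.normalize("NFC", text)               # Layer 4
        text = _PUA.sub("", text)
        text = _TAG.sub("", text)
        text = _HORIZ_WS.sub(" ", text)  # Layer 7: \n and \t survive
        return text[:MAX_LEN]                                   # Layer 8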
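
The <<<EXTERNAL_UNTRUSTED_CONTENT>>> wrapping then reduces to something
like the following sketch; the closing marker and helper name are
assumptions, and sanitize() is the pipeline sketched above:

    EXTERNAL_OPEN = "<<<EXTERNAL_UNTRUSTED_CONTENT>>>"
    EXTERNAL_CLOSE = "<<<END_EXTERNAL_UNTRUSTED_CONTENT>>>"  # assumed name

    def wrap_external(text: str) -> str:
        # Sanitize first, then fence, so downstream prompts can tell the
        # model to treat everything between the markers as data only.
        return f"{EXTERNAL_OPEN}\n{sanitize(text)}\n{EXTERNAL_CLOSE}"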
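
Each hard-fail stub can be as small as a module-level raise, so any
import of a native provider fails immediately; the exception's base
class and message below are assumptions:

    # Sketch of a stub module such as ddg_search.py
    class NativeWebToolDisabledError(ImportError):
        """Native web tools are disabled in the hardened build."""

    raise NativeWebToolDisabledError(
        "This native web provider is disabled; use the searx-backed tools."
    )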
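
The test quarantine is a single line of pytest configuration; the
conftest location and glob below are assumptions:

    # conftest.py next to the quarantined tree
    # pytest reads collect_ignore_glob from conftest.py and skips
    # collecting anything that matches, relative to this directory.
    collect_ignore_glob = ["_disabled_native*"]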

See HARDENING.md for the full audit trail and verification steps.
2026-04-12 14:23:57 +02:00

"""Step 1: LLM provider selection."""
from __future__ import annotations
from dataclasses import dataclass
from wizard.providers import LLM_PROVIDERS, LLMProvider
from wizard.ui import (
ask_choice,
ask_secret,
ask_text,
print_header,
print_info,
print_success,
)


@dataclass
class LLMStepResult:
    """Provider, model, and credential choices captured by this step."""

    provider: LLMProvider
    model_name: str
    api_key: str | None
    base_url: str | None = None


def run_llm_step(step_label: str = "Step 1/3") -> LLMStepResult:
    """Interactively choose a provider, model, and credentials."""
    print_header(f"{step_label} · Choose your LLM provider")
    options = [f"{p.display_name} ({p.description})" for p in LLM_PROVIDERS]
    idx = ask_choice("Enter choice", options)
    provider = LLM_PROVIDERS[idx]
    print()

    # Model selection: offer the list when there is a real choice,
    # otherwise take the provider's only model.
    if len(provider.models) > 1:
        print_info(f"Available models for {provider.display_name}:")
        model_idx = ask_choice("Select model", provider.models, default=0)
        model_name = provider.models[model_idx]
    else:
        model_name = provider.models[0]
    print()

    # Some providers ship a preset base URL in their extra_config.
    base_url: str | None = None
    if provider.name in {"openrouter", "vllm"}:
        base_url = provider.extra_config.get("base_url")

    if provider.name == "other":
        # Custom providers need explicit connection details.
        print_header(f"{step_label} · Connection details")
        base_url = ask_text("Base URL (e.g. https://api.openai.com/v1)", required=True)
        model_name = ask_text("Model name", default=provider.default_model)
    elif provider.auth_hint:
        # Providers with an auth hint handle credentials outside the
        # wizard, so no API key is collected here.
        print_header(f"{step_label} · Authentication")
        print_info(provider.auth_hint)
        return LLMStepResult(
            provider=provider,
            model_name=model_name,
            api_key=None,
            base_url=base_url,
        )

    print_header(f"{step_label} · Enter your API Key")
    api_key = ask_secret(provider.env_var) if provider.env_var else None
    if api_key:
        print_success(f"Key will be saved to .env as {provider.env_var}")
    return LLMStepResult(
        provider=provider,
        model_name=model_name,
        api_key=api_key,
        base_url=base_url,
    )
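
For context, a hypothetical caller of this step; the wizard's real
entry point is not part of this file:

    from wizard.steps.llm import run_llm_step  # assumed import path

    result = run_llm_step(step_label="Step 1/3")
    print(f"Selected {result.provider.display_name} / {result.model_name}")
    if result.api_key and result.provider.env_var:
        # A real wizard run would persist the key into .env here.
        print(f"Would write {result.provider.env_var} to .env")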