mirror of
https://github.com/bytedance/deer-flow.git
synced 2026-04-26 11:48:10 +00:00
- Freeze all config models (AppConfig + 15 sub-configs) with frozen=True - Purify from_file() — remove 9 load_*_from_dict() side-effect calls - Replace mtime/reload/push/pop machinery with single ContextVar + init_app_config() - Delete 10 sub-module globals and their getters/setters/loaders - Migrate 50+ consumers from get_*_config() to get_app_config().xxx - Expand DeerFlowContext: app_config + thread_id + agent_name (frozen dataclass) - Wire into Gateway runtime (worker.py) and DeerFlowClient via context= parameter - Remove sandbox_id from runtime.context — flows through ThreadState.sandbox only - Middleware/tools access runtime.context directly via Runtime[DeerFlowContext] generic - resolve_context() retained at server entry points for LangGraph Server fallback
19 lines
768 B
Python
19 lines
768 B
Python
from types import SimpleNamespace
|
|
|
|
import pytest
|
|
|
|
from deerflow.config.app_config import AppConfig
|
|
from deerflow.skills.security_scanner import scan_skill_content
|
|
|
|
|
|
@pytest.mark.anyio
async def test_scan_skill_content_blocks_when_model_unavailable(monkeypatch):
    """Scanner must fail closed when the moderation model cannot be built.

    With no moderation model configured and chat-model creation raising,
    the scan result should block the content and request manual review.
    """
    # App config stub whose skill_evolution section has no moderation model.
    fake_config = SimpleNamespace(
        skill_evolution=SimpleNamespace(moderation_model_name=None)
    )
    monkeypatch.setattr(AppConfig, "current", staticmethod(lambda: fake_config))

    # Simulate an unavailable model: any attempt to create a chat model raises.
    def _unavailable_model(**kwargs):
        raise RuntimeError("boom")

    monkeypatch.setattr(
        "deerflow.skills.security_scanner.create_chat_model", _unavailable_model
    )

    skill_markdown = "---\nname: demo-skill\ndescription: demo\n---\n"
    result = await scan_skill_content(skill_markdown, executable=False)

    # Fail-closed behavior: block the skill and flag it for manual review.
    assert result.decision == "block"
    assert "manual review required" in result.reason
|